Posted to commits@mxnet.apache.org by zh...@apache.org on 2018/01/04 21:10:14 UTC

[incubator-mxnet-site] branch asf-site updated: Nightly build

This is an automated email from the ASF dual-hosted git repository.

zhasheng pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 9cd5a1c  Nightly build
9cd5a1c is described below

commit 9cd5a1cf2c96e6d516227e4d95eda5de1d60eab8
Author: mxnet-ci <mxnet-ci>
AuthorDate: Thu Jan 4 21:10:06 2018 +0000

    Nightly build
---
 .github/PULL_REQUEST_TEMPLATE.md                   |   12 +-
 versions/master/README.html                        |    2 +-
 versions/master/_modules/index.html                |  274 ++---
 versions/master/_modules/mxnet/autograd.html       |   16 +-
 versions/master/_modules/mxnet/callback.html       |    2 +-
 versions/master/_modules/mxnet/executor.html       |    2 +-
 .../master/_modules/mxnet/executor_manager.html    |    2 +-
 versions/master/_modules/mxnet/gluon/block.html    |    2 +-
 .../mxnet/gluon/contrib/rnn/conv_rnn_cell.html     |    2 +-
 .../_modules/mxnet/gluon/contrib/rnn/rnn_cell.html |    2 +-
 .../_modules/mxnet/gluon/data/dataloader.html      |   15 +-
 .../master/_modules/mxnet/gluon/data/dataset.html  |    6 +-
 .../master/_modules/mxnet/gluon/data/sampler.html  |    2 +-
 .../master/_modules/mxnet/gluon/data/vision.html   |    2 +-
 versions/master/_modules/mxnet/gluon/loss.html     |    2 +-
 .../_modules/mxnet/gluon/model_zoo/vision.html     |    6 +-
 .../mxnet/gluon/model_zoo/vision/alexnet.html      |    2 +-
 .../mxnet/gluon/model_zoo/vision/densenet.html     |    2 +-
 .../mxnet/gluon/model_zoo/vision/inception.html    |    2 +-
 .../mxnet/gluon/model_zoo/vision/mobilenet.html    |    4 +-
 .../mxnet/gluon/model_zoo/vision/resnet.html       |    6 +-
 .../mxnet/gluon/model_zoo/vision/squeezenet.html   |    2 +-
 .../_modules/mxnet/gluon/model_zoo/vision/vgg.html |    2 +-
 .../_modules/mxnet/gluon/nn/basic_layers.html      |    2 +-
 .../_modules/mxnet/gluon/nn/conv_layers.html       |   11 +-
 .../master/_modules/mxnet/gluon/parameter.html     |    4 +-
 .../master/_modules/mxnet/gluon/rnn/rnn_cell.html  |    2 +-
 .../master/_modules/mxnet/gluon/rnn/rnn_layer.html |    2 +-
 versions/master/_modules/mxnet/gluon/trainer.html  |    7 +-
 versions/master/_modules/mxnet/gluon/utils.html    |    2 +-
 .../master/_modules/mxnet/image/detection.html     |   22 +-
 versions/master/_modules/mxnet/image/image.html    |    2 +-
 versions/master/_modules/mxnet/initializer.html    |    2 +-
 versions/master/_modules/mxnet/io.html             |   29 +-
 versions/master/_modules/mxnet/kvstore.html        |    2 +-
 versions/master/_modules/mxnet/lr_scheduler.html   |    2 +-
 versions/master/_modules/mxnet/metric.html         |    2 +-
 versions/master/_modules/mxnet/model.html          |    2 +-
 .../master/_modules/mxnet/module/base_module.html  |    2 +-
 .../_modules/mxnet/module/bucketing_module.html    |    2 +-
 versions/master/_modules/mxnet/module/module.html  |    2 +-
 .../_modules/mxnet/module/python_module.html       |    2 +-
 .../_modules/mxnet/module/sequential_module.html   |    2 +-
 .../master/_modules/mxnet/ndarray/contrib.html     |    2 +-
 .../master/_modules/mxnet/ndarray/ndarray.html     |    2 +-
 versions/master/_modules/mxnet/ndarray/random.html |    2 +-
 versions/master/_modules/mxnet/ndarray/sparse.html |    2 +-
 versions/master/_modules/mxnet/ndarray/utils.html  |    2 +-
 versions/master/_modules/mxnet/optimizer.html      |   70 +-
 versions/master/_modules/mxnet/random.html         |    2 +-
 versions/master/_modules/mxnet/recordio.html       |    2 +-
 versions/master/_modules/mxnet/rnn/io.html         |    2 +-
 versions/master/_modules/mxnet/rnn/rnn.html        |    2 +-
 versions/master/_modules/mxnet/rnn/rnn_cell.html   |    2 +-
 versions/master/_modules/mxnet/rtc.html            |    2 +-
 versions/master/_modules/mxnet/symbol/contrib.html |    2 +-
 versions/master/_modules/mxnet/symbol/random.html  |    2 +-
 versions/master/_modules/mxnet/symbol/symbol.html  |   32 +-
 versions/master/_modules/symbol.html               |    2 +-
 .../master/_sources/api/python/gluon/model_zoo.txt |   35 +
 .../master/_sources/api/python/ndarray/sparse.txt  |    3 +-
 versions/master/_sources/faq/index.txt             |    2 -
 .../master/_sources/install/build_from_source.txt  |   37 +
 versions/master/_sources/install/index.txt         |   50 +-
 versions/master/_sources/install/windows_setup.txt |   26 +-
 .../master/_sources/tutorials/basic/image_io.txt   |   19 +-
 .../master/_sources/tutorials/basic/module.txt     |   87 ++
 versions/master/_sources/tutorials/c++/basics.txt  |   17 +-
 versions/master/_sources/tutorials/gluon/mnist.txt |    2 +-
 versions/master/_sources/tutorials/index.txt       |   11 +-
 versions/master/_sources/tutorials/nlp/cnn.txt     |  162 +--
 .../tutorials/python/linear-regression.txt         |    5 +-
 .../master/_sources/tutorials/python/mnist.txt     |    2 +-
 versions/master/_sources/tutorials/sparse/csr.txt  |  181 +++
 .../_sources/tutorials/sparse/row_sparse.txt       |  181 ++-
 .../master/_sources/tutorials/sparse/train.txt     |   68 +-
 .../tutorials/unsupervised_learning/gan.txt        |  162 +--
 versions/master/_static/mxnet-theme/index.html     |   22 +-
 versions/master/_static/mxnet-theme/layout.html    |    2 +-
 versions/master/_static/mxnet.css                  |    4 +
 versions/master/api/c++/index.html                 |    2 +-
 versions/master/api/julia/index.html               |    2 +-
 versions/master/api/perl/index.html                |    2 +-
 versions/master/api/perl/io.html                   |    2 +-
 versions/master/api/perl/kvstore.html              |    2 +-
 versions/master/api/perl/module.html               |    2 +-
 versions/master/api/perl/ndarray.html              |    2 +-
 versions/master/api/perl/symbol.html               |    2 +-
 versions/master/api/python/autograd/autograd.html  |   15 +-
 versions/master/api/python/callback/callback.html  |    2 +-
 versions/master/api/python/executor/executor.html  |    2 +-
 versions/master/api/python/gluon/contrib.html      |    2 +-
 versions/master/api/python/gluon/data.html         |    2 +-
 versions/master/api/python/gluon/gluon.html        |    2 +-
 versions/master/api/python/gluon/loss.html         |    2 +-
 versions/master/api/python/gluon/model_zoo.html    |  242 +++-
 versions/master/api/python/gluon/nn.html           |    2 +-
 versions/master/api/python/gluon/rnn.html          |    2 +-
 versions/master/api/python/image/image.html        |    2 +-
 versions/master/api/python/index.html              |    2 +-
 versions/master/api/python/io/io.html              |    6 +-
 versions/master/api/python/kvstore/kvstore.html    |    2 +-
 versions/master/api/python/metric/metric.html      |    2 +-
 versions/master/api/python/model.html              |   25 +-
 versions/master/api/python/module/module.html      |    2 +-
 versions/master/api/python/ndarray/contrib.html    |  250 ++++-
 versions/master/api/python/ndarray/linalg.html     |    2 +-
 versions/master/api/python/ndarray/ndarray.html    |  222 ++--
 versions/master/api/python/ndarray/random.html     |    2 +-
 versions/master/api/python/ndarray/sparse.html     |   49 +-
 .../api/python/optimization/optimization.html      |   25 +-
 versions/master/api/python/rtc/rtc.html            |    2 +-
 versions/master/api/python/symbol/contrib.html     |  246 ++++-
 versions/master/api/python/symbol/linalg.html      |    2 +-
 versions/master/api/python/symbol/random.html      |    2 +-
 versions/master/api/python/symbol/rnn.html         |    2 +-
 versions/master/api/python/symbol/sparse.html      |   45 +-
 versions/master/api/python/symbol/symbol.html      |  244 +++--
 .../symbol_in_pictures/symbol_in_pictures.html     |    2 +-
 versions/master/api/r/index.html                   |    2 +-
 .../api/scala/docs/ml/dmlc/mxnet/DataIter.html     |   54 +-
 .../api/scala/docs/ml/dmlc/mxnet/DataPack.html     |    4 +-
 .../docs/ml/dmlc/mxnet/io/PrefetchingIter.html     |   58 +-
 versions/master/api/scala/index.html               |    2 +-
 versions/master/api/scala/io.html                  |    2 +-
 versions/master/api/scala/kvstore.html             |    2 +-
 versions/master/api/scala/model.html               |    2 +-
 versions/master/api/scala/module.html              |    2 +-
 versions/master/api/scala/ndarray.html             |    2 +-
 versions/master/api/scala/symbol.html              |    2 +-
 versions/master/api/scala/symbol_in_pictures.html  |    2 +-
 versions/master/architecture/index.html            |    2 +-
 .../master/architecture/note_data_loading.html     |    2 +-
 versions/master/architecture/note_engine.html      |    2 +-
 versions/master/architecture/note_memory.html      |    2 +-
 versions/master/architecture/overview.html         |    2 +-
 versions/master/architecture/program_model.html    |    2 +-
 versions/master/architecture/release_note_0_9.html |    2 +-
 versions/master/architecture/rnn_interface.html    |    2 +-
 versions/master/community/contribute.html          |    2 +-
 versions/master/community/index.html               |    4 +-
 versions/master/community/mxnet_channels.html      |    2 +-
 versions/master/community/powered_by.html          |    2 +-
 versions/master/doxygen/annotated.html             |   26 +-
 versions/master/doxygen/base_8h.html               |    8 +-
 versions/master/doxygen/base_8h__dep__incl.map     |   31 +-
 versions/master/doxygen/base_8h__dep__incl.md5     |    2 +-
 versions/master/doxygen/base_8h__dep__incl.png     |  Bin 121805 -> 128986 bytes
 versions/master/doxygen/base_8h_source.html        |    4 +-
 versions/master/doxygen/c__api_8h.html             |   39 +-
 versions/master/doxygen/c__api_8h_source.html      |  860 +++++++--------
 versions/master/doxygen/c__predict__api_8h.html    |    2 +-
 .../master/doxygen/c__predict__api_8h_source.html  |    2 +-
 ...ieldEntry_3_01mxnet_1_1TShape_01_4-members.html |    2 +-
 ...ter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html |    2 +-
 versions/master/doxygen/classes.html               |   59 +-
 .../doxygen/classmxnet_1_1Engine-members.html      |    2 +-
 versions/master/doxygen/classmxnet_1_1Engine.html  |    2 +-
 .../doxygen/classmxnet_1_1Executor-members.html    |    2 +-
 .../master/doxygen/classmxnet_1_1Executor.html     |    2 +-
 .../doxygen/classmxnet_1_1IIterator-members.html   |    2 +-
 .../master/doxygen/classmxnet_1_1IIterator.html    |    2 +-
 .../doxygen/classmxnet_1_1Imperative-members.html  |    2 +-
 .../master/doxygen/classmxnet_1_1Imperative.html   |    2 +-
 ...classmxnet_1_1Imperative_1_1AGInfo-members.html |    2 +-
 .../classmxnet_1_1Imperative_1_1AGInfo.html        |    2 +-
 ...assmxnet_1_1Imperative_1_1CachedOp-members.html |    2 +-
 .../classmxnet_1_1Imperative_1_1CachedOp.html      |    2 +-
 .../doxygen/classmxnet_1_1KVStore-members.html     |    2 +-
 versions/master/doxygen/classmxnet_1_1KVStore.html |    2 +-
 .../doxygen/classmxnet_1_1NDArray-members.html     |    2 +-
 versions/master/doxygen/classmxnet_1_1NDArray.html |    4 +-
 .../doxygen/classmxnet_1_1OpStatePtr-members.html  |    2 +-
 .../master/doxygen/classmxnet_1_1OpStatePtr.html   |    2 +-
 .../doxygen/classmxnet_1_1Operator-members.html    |    2 +-
 .../master/doxygen/classmxnet_1_1Operator.html     |    2 +-
 .../classmxnet_1_1OperatorProperty-members.html    |    2 +-
 .../doxygen/classmxnet_1_1OperatorProperty.html    |    2 +-
 .../classmxnet_1_1ResourceManager-members.html     |    2 +-
 .../doxygen/classmxnet_1_1ResourceManager.html     |    2 +-
 .../doxygen/classmxnet_1_1Storage-members.html     |    2 +-
 versions/master/doxygen/classmxnet_1_1Storage.html |    4 +-
 .../doxygen/classmxnet_1_1TBlob-members.html       |    2 +-
 versions/master/doxygen/classmxnet_1_1TBlob.html   |    2 +-
 ...smxnet_1_1common_1_1LazyAllocArray-members.html |    2 +-
 .../classmxnet_1_1common_1_1LazyAllocArray.html    |    2 +-
 ...classmxnet_1_1common_1_1ObjectPool-members.html |    2 +-
 .../classmxnet_1_1common_1_1ObjectPool.html        |    2 +-
 ...xnet_1_1common_1_1random_1_1RandGenerator.html} |   25 +-
 ...Generator_3_01cpu_00_01DType_01_4-members.html} |   18 +-
 ...m_1_1RandGenerator_3_01cpu_00_01DType_01_4.html |  261 +++++
 ...r_3_01cpu_00_01DType_01_4_1_1Impl-members.html} |   20 +-
 ...dGenerator_3_01cpu_00_01DType_01_4_1_1Impl.html |  302 +++++
 ..._01cpu_00_01DType_01_4_1_1Impl__coll__graph.map |    2 +
 ..._01cpu_00_01DType_01_4_1_1Impl__coll__graph.md5 |    1 +
 ..._01cpu_00_01DType_01_4_1_1Impl__coll__graph.png |  Bin 0 -> 5725 bytes
 ...erator_3_01cpu_00_01DType_01_4__coll__graph.map |    2 +
 ...erator_3_01cpu_00_01DType_01_4__coll__graph.md5 |    1 +
 ...erator_3_01cpu_00_01DType_01_4__coll__graph.png |  Bin 0 -> 8233 bytes
 ...Generator_3_01gpu_00_01DType_01_4-members.html} |   18 +-
 ..._1_1RandGenerator_3_01gpu_00_01DType_01_4.html} |  158 +--
 ...r_3_01gpu_00_01DType_01_4_1_1Impl-members.html} |   20 +-
 ...dGenerator_3_01gpu_00_01DType_01_4_1_1Impl.html |  307 ++++++
 ..._01gpu_00_01DType_01_4_1_1Impl__coll__graph.map |    2 +
 ..._01gpu_00_01DType_01_4_1_1Impl__coll__graph.md5 |    1 +
 ..._01gpu_00_01DType_01_4_1_1Impl__coll__graph.png |  Bin 0 -> 6083 bytes
 ...erator_3_01gpu_00_01DType_01_4__coll__graph.map |    2 +
 ...erator_3_01gpu_00_01DType_01_4__coll__graph.md5 |    1 +
 ...erator_3_01gpu_00_01DType_01_4__coll__graph.png |  Bin 0 -> 8325 bytes
 ...enerator_3_01gpu_00_01double_01_4-members.html} |    9 +-
 ...1_1RandGenerator_3_01gpu_00_01double_01_4.html} |   40 +-
 ..._3_01gpu_00_01double_01_4_1_1Impl-members.html} |   20 +-
 ...Generator_3_01gpu_00_01double_01_4_1_1Impl.html |  293 +++++
 ...01gpu_00_01double_01_4_1_1Impl__coll__graph.map |    2 +
 ...01gpu_00_01double_01_4_1_1Impl__coll__graph.md5 |    1 +
 ...01gpu_00_01double_01_4_1_1Impl__coll__graph.png |  Bin 0 -> 5798 bytes
 ...rator_3_01gpu_00_01double_01_4__coll__graph.map |    2 +
 ...rator_3_01gpu_00_01double_01_4__coll__graph.md5 |    1 +
 ...rator_3_01gpu_00_01double_01_4__coll__graph.png |  Bin 0 -> 3550 bytes
 ...mon_1_1random_1_1RandGenerator__coll__graph.map |    2 +
 ...mon_1_1random_1_1RandGenerator__coll__graph.md5 |    1 +
 ...mon_1_1random_1_1RandGenerator__coll__graph.png |  Bin 0 -> 5047 bytes
 ...et_1_1engine_1_1CallbackOnComplete-members.html |    2 +-
 ...classmxnet_1_1engine_1_1CallbackOnComplete.html |    2 +-
 ...assmxnet_1_1op_1_1SimpleOpRegEntry-members.html |    2 +-
 .../classmxnet_1_1op_1_1SimpleOpRegEntry.html      |    2 +-
 ...assmxnet_1_1op_1_1SimpleOpRegistry-members.html |    2 +-
 .../classmxnet_1_1op_1_1SimpleOpRegistry.html      |    2 +-
 .../classmxnet_1_1rtc_1_1CudaModule-members.html   |    2 +-
 .../doxygen/classmxnet_1_1rtc_1_1CudaModule.html   |    2 +-
 ...net_1_1rtc_1_1CudaModule_1_1Kernel-members.html |    2 +-
 .../classmxnet_1_1rtc_1_1CudaModule_1_1Kernel.html |    2 +-
 versions/master/doxygen/cuda__utils_8h.html        |    9 +-
 .../master/doxygen/cuda__utils_8h__dep__incl.map   |   12 +
 .../master/doxygen/cuda__utils_8h__dep__incl.md5   |    1 +
 .../master/doxygen/cuda__utils_8h__dep__incl.png   |  Bin 0 -> 39440 bytes
 versions/master/doxygen/cuda__utils_8h_source.html |    2 +-
 .../doxygen/{index.html => dir_000000_000003.html} |   18 +-
 .../doxygen/{index.html => dir_000001_000002.html} |   18 +-
 ...297ef4c3efbcdba.html => dir_000002_000001.html} |   23 +-
 versions/master/doxygen/dir_000003_000000.html     |    4 +-
 .../dir_1143c7affb9ebd026cb6818dd282def7.html      |    4 +-
 .../dir_1143c7affb9ebd026cb6818dd282def7_dep.map   |    2 +
 .../dir_1143c7affb9ebd026cb6818dd282def7_dep.md5   |    2 +-
 .../dir_1143c7affb9ebd026cb6818dd282def7_dep.png   |  Bin 2013 -> 2918 bytes
 .../dir_68267d1309a1af8e8297ef4c3efbcdba.html      |    4 +-
 .../dir_68267d1309a1af8e8297ef4c3efbcdba_dep.map   |    3 +-
 .../dir_68267d1309a1af8e8297ef4c3efbcdba_dep.md5   |    2 +-
 .../dir_68267d1309a1af8e8297ef4c3efbcdba_dep.png   |  Bin 2986 -> 3705 bytes
 .../dir_d44c64559bbebec7f509842c48db8b23.html      |    4 +-
 .../dir_d44c64559bbebec7f509842c48db8b23_dep.map   |    3 +
 .../dir_d44c64559bbebec7f509842c48db8b23_dep.md5   |    2 +-
 .../dir_d44c64559bbebec7f509842c48db8b23_dep.png   |  Bin 1787 -> 3716 bytes
 .../dir_fdedb0aba14d44ce9d99bc100e026e6a.html      |    7 +-
 .../dir_fdedb0aba14d44ce9d99bc100e026e6a_dep.map   |    2 +-
 .../dir_fdedb0aba14d44ce9d99bc100e026e6a_dep.md5   |    2 +-
 .../dir_fdedb0aba14d44ce9d99bc100e026e6a_dep.png   |  Bin 2936 -> 2995 bytes
 versions/master/doxygen/engine_8h.html             |    2 +-
 versions/master/doxygen/engine_8h_source.html      |    2 +-
 versions/master/doxygen/exec__utils_8h.html        |    4 +-
 versions/master/doxygen/exec__utils_8h__incl.map   |   18 +-
 versions/master/doxygen/exec__utils_8h__incl.md5   |    2 +-
 versions/master/doxygen/exec__utils_8h__incl.png   |  Bin 274753 -> 307779 bytes
 versions/master/doxygen/exec__utils_8h_source.html |    2 +-
 versions/master/doxygen/executor_8h.html           |    4 +-
 versions/master/doxygen/executor_8h__incl.map      |   18 +-
 versions/master/doxygen/executor_8h__incl.md5      |    2 +-
 versions/master/doxygen/executor_8h__incl.png      |  Bin 247958 -> 257399 bytes
 versions/master/doxygen/executor_8h_source.html    |    2 +-
 versions/master/doxygen/files.html                 |    7 +-
 versions/master/doxygen/functions.html             |    2 +-
 versions/master/doxygen/functions__.html           |    2 +-
 versions/master/doxygen/functions_a.html           |    6 +-
 versions/master/doxygen/functions_b.html           |    2 +-
 versions/master/doxygen/functions_c.html           |    2 +-
 versions/master/doxygen/functions_d.html           |    2 +-
 versions/master/doxygen/functions_e.html           |    2 +-
 versions/master/doxygen/functions_enum.html        |    2 +-
 versions/master/doxygen/functions_eval.html        |    5 +-
 versions/master/doxygen/functions_f.html           |    9 +-
 versions/master/doxygen/functions_func.html        |    3 +-
 versions/master/doxygen/functions_func_a.html      |    7 +-
 versions/master/doxygen/functions_func_b.html      |    3 +-
 versions/master/doxygen/functions_func_c.html      |    3 +-
 versions/master/doxygen/functions_func_d.html      |    3 +-
 versions/master/doxygen/functions_func_e.html      |    3 +-
 versions/master/doxygen/functions_func_f.html      |    7 +-
 versions/master/doxygen/functions_func_g.html      |    6 +-
 versions/master/doxygen/functions_func_i.html      |    8 +-
 versions/master/doxygen/functions_func_l.html      |    3 +-
 versions/master/doxygen/functions_func_m.html      |    3 +-
 versions/master/doxygen/functions_func_n.html      |    8 +-
 versions/master/doxygen/functions_func_o.html      |   10 +-
 versions/master/doxygen/functions_func_p.html      |    3 +-
 versions/master/doxygen/functions_func_r.html      |    8 +-
 versions/master/doxygen/functions_func_s.html      |   23 +-
 versions/master/doxygen/functions_func_t.html      |    3 +-
 .../{functions_func.html => functions_func_u.html} |   18 +-
 versions/master/doxygen/functions_func_v.html      |    3 +-
 versions/master/doxygen/functions_func_w.html      |    3 +-
 versions/master/doxygen/functions_func_~.html      |    7 +-
 versions/master/doxygen/functions_g.html           |    5 +-
 versions/master/doxygen/functions_i.html           |    7 +-
 versions/master/doxygen/functions_k.html           |   13 +-
 versions/master/doxygen/functions_l.html           |    2 +-
 versions/master/doxygen/functions_m.html           |    2 +-
 versions/master/doxygen/functions_n.html           |    7 +-
 versions/master/doxygen/functions_o.html           |    9 +-
 versions/master/doxygen/functions_p.html           |    2 +-
 versions/master/doxygen/functions_r.html           |    9 +-
 versions/master/doxygen/functions_rela.html        |    2 +-
 versions/master/doxygen/functions_s.html           |   14 +-
 versions/master/doxygen/functions_t.html           |    6 +-
 versions/master/doxygen/functions_type.html        |    5 +-
 versions/master/doxygen/functions_u.html           |    7 +-
 versions/master/doxygen/functions_v.html           |    2 +-
 versions/master/doxygen/functions_vars.html        |   10 +-
 versions/master/doxygen/functions_w.html           |    2 +-
 versions/master/doxygen/functions_~.html           |    6 +-
 versions/master/doxygen/globals.html               |    2 +-
 versions/master/doxygen/globals_c.html             |    2 +-
 versions/master/doxygen/globals_d.html             |    2 +-
 versions/master/doxygen/globals_defs.html          |    2 +-
 versions/master/doxygen/globals_e.html             |    2 +-
 versions/master/doxygen/globals_enum.html          |    2 +-
 versions/master/doxygen/globals_eval.html          |    2 +-
 versions/master/doxygen/globals_f.html             |    2 +-
 versions/master/doxygen/globals_func.html          |    5 +-
 versions/master/doxygen/globals_g.html             |    2 +-
 versions/master/doxygen/globals_k.html             |    2 +-
 versions/master/doxygen/globals_m.html             |    5 +-
 versions/master/doxygen/globals_n.html             |    2 +-
 versions/master/doxygen/globals_p.html             |    2 +-
 versions/master/doxygen/globals_r.html             |    2 +-
 versions/master/doxygen/globals_s.html             |    2 +-
 versions/master/doxygen/globals_type.html          |    2 +-
 versions/master/doxygen/graph__attr__types_8h.html |    4 +-
 .../master/doxygen/graph__attr__types_8h__incl.map |   14 +-
 .../master/doxygen/graph__attr__types_8h__incl.md5 |    2 +-
 .../master/doxygen/graph__attr__types_8h__incl.png |  Bin 123223 -> 165738 bytes
 .../doxygen/graph__attr__types_8h_source.html      |    2 +-
 versions/master/doxygen/graph_legend.html          |    2 +-
 versions/master/doxygen/hierarchy.html             |   65 +-
 versions/master/doxygen/imperative_8h.html         |    4 +-
 versions/master/doxygen/imperative_8h__incl.map    |   18 +-
 versions/master/doxygen/imperative_8h__incl.md5    |    2 +-
 versions/master/doxygen/imperative_8h__incl.png    |  Bin 164688 -> 213810 bytes
 versions/master/doxygen/imperative_8h_source.html  |    2 +-
 versions/master/doxygen/index.html                 |    2 +-
 versions/master/doxygen/inherit_graph_11.map       |    2 +-
 versions/master/doxygen/inherit_graph_11.md5       |    2 +-
 versions/master/doxygen/inherit_graph_11.png       |  Bin 2276 -> 3496 bytes
 versions/master/doxygen/inherit_graph_12.map       |    2 +-
 versions/master/doxygen/inherit_graph_12.md5       |    2 +-
 versions/master/doxygen/inherit_graph_12.png       |  Bin 2882 -> 3588 bytes
 versions/master/doxygen/inherit_graph_13.map       |    2 +-
 versions/master/doxygen/inherit_graph_13.md5       |    2 +-
 versions/master/doxygen/inherit_graph_13.png       |  Bin 1320 -> 5035 bytes
 versions/master/doxygen/inherit_graph_14.map       |    2 +-
 versions/master/doxygen/inherit_graph_14.md5       |    2 +-
 versions/master/doxygen/inherit_graph_14.png       |  Bin 1629 -> 3391 bytes
 versions/master/doxygen/inherit_graph_15.map       |    2 +-
 versions/master/doxygen/inherit_graph_15.md5       |    2 +-
 versions/master/doxygen/inherit_graph_15.png       |  Bin 1300 -> 3423 bytes
 versions/master/doxygen/inherit_graph_16.map       |    4 +-
 versions/master/doxygen/inherit_graph_16.md5       |    2 +-
 versions/master/doxygen/inherit_graph_16.png       |  Bin 10208 -> 3672 bytes
 versions/master/doxygen/inherit_graph_17.map       |    2 +-
 versions/master/doxygen/inherit_graph_17.md5       |    2 +-
 versions/master/doxygen/inherit_graph_17.png       |  Bin 1135 -> 3760 bytes
 versions/master/doxygen/inherit_graph_18.map       |    2 +-
 versions/master/doxygen/inherit_graph_18.md5       |    2 +-
 versions/master/doxygen/inherit_graph_18.png       |  Bin 2764 -> 2276 bytes
 versions/master/doxygen/inherit_graph_19.map       |    2 +-
 versions/master/doxygen/inherit_graph_19.md5       |    2 +-
 versions/master/doxygen/inherit_graph_19.png       |  Bin 1436 -> 2882 bytes
 versions/master/doxygen/inherit_graph_20.map       |    2 +-
 versions/master/doxygen/inherit_graph_20.md5       |    2 +-
 versions/master/doxygen/inherit_graph_20.png       |  Bin 2628 -> 1320 bytes
 versions/master/doxygen/inherit_graph_21.map       |    2 +-
 versions/master/doxygen/inherit_graph_21.md5       |    2 +-
 versions/master/doxygen/inherit_graph_21.png       |  Bin 1505 -> 1629 bytes
 versions/master/doxygen/inherit_graph_22.map       |    2 +-
 versions/master/doxygen/inherit_graph_22.md5       |    2 +-
 versions/master/doxygen/inherit_graph_22.png       |  Bin 2278 -> 1300 bytes
 versions/master/doxygen/inherit_graph_23.map       |    4 +-
 versions/master/doxygen/inherit_graph_23.md5       |    2 +-
 versions/master/doxygen/inherit_graph_23.png       |  Bin 2581 -> 10208 bytes
 versions/master/doxygen/inherit_graph_24.map       |    2 +-
 versions/master/doxygen/inherit_graph_24.md5       |    2 +-
 versions/master/doxygen/inherit_graph_24.png       |  Bin 1523 -> 1135 bytes
 versions/master/doxygen/inherit_graph_25.map       |    2 +-
 versions/master/doxygen/inherit_graph_25.md5       |    2 +-
 versions/master/doxygen/inherit_graph_25.png       |  Bin 1571 -> 2764 bytes
 versions/master/doxygen/inherit_graph_26.map       |    2 +-
 versions/master/doxygen/inherit_graph_26.md5       |    2 +-
 versions/master/doxygen/inherit_graph_26.png       |  Bin 2201 -> 1436 bytes
 versions/master/doxygen/inherit_graph_27.map       |    6 +-
 versions/master/doxygen/inherit_graph_27.md5       |    2 +-
 versions/master/doxygen/inherit_graph_27.png       |  Bin 14689 -> 2628 bytes
 versions/master/doxygen/inherit_graph_28.map       |    2 +-
 versions/master/doxygen/inherit_graph_28.md5       |    2 +-
 versions/master/doxygen/inherit_graph_28.png       |  Bin 2473 -> 1505 bytes
 versions/master/doxygen/inherit_graph_29.map       |    2 +-
 versions/master/doxygen/inherit_graph_29.md5       |    2 +-
 versions/master/doxygen/inherit_graph_29.png       |  Bin 2317 -> 2278 bytes
 versions/master/doxygen/inherit_graph_30.map       |    2 +-
 versions/master/doxygen/inherit_graph_30.md5       |    2 +-
 versions/master/doxygen/inherit_graph_30.png       |  Bin 1569 -> 2581 bytes
 versions/master/doxygen/inherit_graph_31.map       |    2 +-
 versions/master/doxygen/inherit_graph_31.md5       |    2 +-
 versions/master/doxygen/inherit_graph_31.png       |  Bin 1514 -> 1523 bytes
 versions/master/doxygen/inherit_graph_32.map       |    2 +-
 versions/master/doxygen/inherit_graph_32.md5       |    2 +-
 versions/master/doxygen/inherit_graph_32.png       |  Bin 1837 -> 1571 bytes
 versions/master/doxygen/inherit_graph_33.map       |    2 +-
 versions/master/doxygen/inherit_graph_33.md5       |    2 +-
 versions/master/doxygen/inherit_graph_33.png       |  Bin 1656 -> 2201 bytes
 versions/master/doxygen/inherit_graph_34.map       |    6 +-
 versions/master/doxygen/inherit_graph_34.md5       |    2 +-
 versions/master/doxygen/inherit_graph_34.png       |  Bin 1478 -> 14689 bytes
 versions/master/doxygen/inherit_graph_35.map       |    2 +-
 versions/master/doxygen/inherit_graph_35.md5       |    2 +-
 versions/master/doxygen/inherit_graph_35.png       |  Bin 2073 -> 2473 bytes
 versions/master/doxygen/inherit_graph_36.map       |    2 +-
 versions/master/doxygen/inherit_graph_36.md5       |    2 +-
 versions/master/doxygen/inherit_graph_36.png       |  Bin 1712 -> 2317 bytes
 versions/master/doxygen/inherit_graph_37.map       |    2 +-
 versions/master/doxygen/inherit_graph_37.md5       |    2 +-
 versions/master/doxygen/inherit_graph_37.png       |  Bin 1917 -> 1569 bytes
 versions/master/doxygen/inherit_graph_38.map       |    2 +-
 versions/master/doxygen/inherit_graph_38.md5       |    2 +-
 versions/master/doxygen/inherit_graph_38.png       |  Bin 3035 -> 1514 bytes
 versions/master/doxygen/inherit_graph_39.map       |    2 +-
 versions/master/doxygen/inherit_graph_39.md5       |    2 +-
 versions/master/doxygen/inherit_graph_39.png       |  Bin 2562 -> 1837 bytes
 versions/master/doxygen/inherit_graph_40.map       |    2 +-
 versions/master/doxygen/inherit_graph_40.md5       |    2 +-
 versions/master/doxygen/inherit_graph_40.png       |  Bin 1595 -> 1656 bytes
 versions/master/doxygen/inherit_graph_41.map       |    2 +-
 versions/master/doxygen/inherit_graph_41.md5       |    2 +-
 versions/master/doxygen/inherit_graph_41.png       |  Bin 1712 -> 1478 bytes
 versions/master/doxygen/inherit_graph_42.map       |    2 +-
 versions/master/doxygen/inherit_graph_42.md5       |    2 +-
 versions/master/doxygen/inherit_graph_42.png       |  Bin 1993 -> 2073 bytes
 versions/master/doxygen/inherit_graph_43.map       |    2 +-
 versions/master/doxygen/inherit_graph_43.md5       |    2 +-
 versions/master/doxygen/inherit_graph_43.png       |  Bin 1273 -> 1712 bytes
 versions/master/doxygen/inherit_graph_44.map       |    2 +-
 versions/master/doxygen/inherit_graph_44.md5       |    2 +-
 versions/master/doxygen/inherit_graph_44.png       |  Bin 1506 -> 1917 bytes
 versions/master/doxygen/inherit_graph_45.map       |    2 +-
 versions/master/doxygen/inherit_graph_45.md5       |    2 +-
 versions/master/doxygen/inherit_graph_45.png       |  Bin 1891 -> 3035 bytes
 .../{inherit_graph_39.map => inherit_graph_46.map} |    0
 .../{inherit_graph_39.md5 => inherit_graph_46.md5} |    0
 .../{inherit_graph_39.png => inherit_graph_46.png} |  Bin
 .../{inherit_graph_40.map => inherit_graph_47.map} |    0
 .../{inherit_graph_40.md5 => inherit_graph_47.md5} |    0
 .../{inherit_graph_40.png => inherit_graph_47.png} |  Bin
 .../{inherit_graph_41.map => inherit_graph_48.map} |    0
 .../{inherit_graph_41.md5 => inherit_graph_48.md5} |    0
 .../{inherit_graph_41.png => inherit_graph_48.png} |  Bin
 .../{inherit_graph_42.map => inherit_graph_49.map} |    0
 .../{inherit_graph_42.md5 => inherit_graph_49.md5} |    0
 .../{inherit_graph_42.png => inherit_graph_49.png} |  Bin
 .../{inherit_graph_43.map => inherit_graph_50.map} |    0
 .../{inherit_graph_43.md5 => inherit_graph_50.md5} |    0
 .../{inherit_graph_43.png => inherit_graph_50.png} |  Bin
 .../{inherit_graph_44.map => inherit_graph_51.map} |    0
 .../{inherit_graph_44.md5 => inherit_graph_51.md5} |    0
 .../{inherit_graph_44.png => inherit_graph_51.png} |  Bin
 .../{inherit_graph_45.map => inherit_graph_52.map} |    0
 .../{inherit_graph_45.md5 => inherit_graph_52.md5} |    0
 .../{inherit_graph_45.png => inherit_graph_52.png} |  Bin
 versions/master/doxygen/inherits.html              |  100 +-
 versions/master/doxygen/io_8h.html                 |    2 +-
 versions/master/doxygen/io_8h_source.html          |    2 +-
 versions/master/doxygen/kvstore_8h.html            |    2 +-
 versions/master/doxygen/kvstore_8h_source.html     |    2 +-
 versions/master/doxygen/lazy__alloc__array_8h.html |    2 +-
 .../doxygen/lazy__alloc__array_8h_source.html      |    2 +-
 versions/master/doxygen/namespacedmlc.html         |    2 +-
 .../master/doxygen/namespacedmlc_1_1parameter.html |    2 +-
 versions/master/doxygen/namespacemembers.html      |   15 +-
 versions/master/doxygen/namespacemembers_enum.html |    2 +-
 versions/master/doxygen/namespacemembers_eval.html |    2 +-
 versions/master/doxygen/namespacemembers_func.html |   17 +-
 versions/master/doxygen/namespacemembers_type.html |    2 +-
 versions/master/doxygen/namespacemxnet.html        |    2 +-
 .../master/doxygen/namespacemxnet_1_1common.html   |   58 +-
 .../doxygen/namespacemxnet_1_1common_1_1cuda.html  |    2 +-
 .../namespacemxnet_1_1common_1_1helper.html        |    2 +-
 ...tml => namespacemxnet_1_1common_1_1random.html} |   17 +-
 versions/master/doxygen/namespacemxnet_1_1csr.html |    2 +-
 .../master/doxygen/namespacemxnet_1_1engine.html   |    2 +-
 versions/master/doxygen/namespacemxnet_1_1op.html  |    2 +-
 .../doxygen/namespacemxnet_1_1rowsparse.html       |    2 +-
 versions/master/doxygen/namespacemxnet_1_1rtc.html |    2 +-
 versions/master/doxygen/namespaces.html            |   15 +-
 versions/master/doxygen/ndarray_8h.html            |    2 +-
 versions/master/doxygen/ndarray_8h_source.html     | 1155 ++++++++++----------
 versions/master/doxygen/object__pool_8h.html       |    2 +-
 .../master/doxygen/object__pool_8h_source.html     |    2 +-
 versions/master/doxygen/op__attr__types_8h.html    |    4 +-
 .../master/doxygen/op__attr__types_8h__incl.map    |   12 +-
 .../master/doxygen/op__attr__types_8h__incl.md5    |    2 +-
 .../master/doxygen/op__attr__types_8h__incl.png    |  Bin 104120 -> 142032 bytes
 .../master/doxygen/op__attr__types_8h_source.html  |    2 +-
 versions/master/doxygen/operator_8h.html           |    4 +-
 versions/master/doxygen/operator_8h__incl.map      |   14 +-
 versions/master/doxygen/operator_8h__incl.md5      |    2 +-
 versions/master/doxygen/operator_8h__incl.png      |  Bin 174099 -> 220105 bytes
 versions/master/doxygen/operator_8h_source.html    |    2 +-
 versions/master/doxygen/operator__util_8h.html     |    4 +-
 .../master/doxygen/operator__util_8h__incl.map     |   16 +-
 .../master/doxygen/operator__util_8h__incl.md5     |    2 +-
 .../master/doxygen/operator__util_8h__incl.png     |  Bin 213517 -> 255953 bytes
 .../master/doxygen/operator__util_8h_source.html   |    4 +-
 ...oc__array_8h.html => random__generator_8h.html} |   56 +-
 .../doxygen/random__generator_8h__dep__incl.map    |   11 +
 .../doxygen/random__generator_8h__dep__incl.md5    |    1 +
 .../doxygen/random__generator_8h__dep__incl.png    |  Bin 0 -> 37155 bytes
 .../master/doxygen/random__generator_8h__incl.map  |    4 +
 .../master/doxygen/random__generator_8h__incl.md5  |    1 +
 .../master/doxygen/random__generator_8h__incl.png  |  Bin 0 -> 46188 bytes
 .../doxygen/random__generator_8h_source.html       |  335 ++++++
 versions/master/doxygen/resource_8h.html           |    5 +-
 versions/master/doxygen/resource_8h__incl.map      |    6 +-
 versions/master/doxygen/resource_8h__incl.md5      |    2 +-
 versions/master/doxygen/resource_8h__incl.png      |  Bin 37145 -> 71108 bytes
 versions/master/doxygen/resource_8h_source.html    |  188 ++--
 versions/master/doxygen/rtc_8h.html                |    2 +-
 versions/master/doxygen/rtc_8h_source.html         |    2 +-
 versions/master/doxygen/search/all_11.js           |   12 +-
 versions/master/doxygen/search/all_12.js           |    3 +-
 versions/master/doxygen/search/all_13.js           |    2 +-
 versions/master/doxygen/search/all_14.js           |    1 +
 versions/master/doxygen/search/all_17.js           |    1 +
 versions/master/doxygen/search/all_2.js            |    1 +
 versions/master/doxygen/search/all_3.js            |    2 +-
 versions/master/doxygen/search/all_5.js            |    1 +
 versions/master/doxygen/search/all_7.js            |    2 +
 versions/master/doxygen/search/all_8.js            |    1 +
 versions/master/doxygen/search/all_a.js            |    4 +
 versions/master/doxygen/search/all_b.js            |    3 +
 versions/master/doxygen/search/all_c.js            |    3 +-
 versions/master/doxygen/search/all_d.js            |    2 +
 versions/master/doxygen/search/all_e.js            |    1 +
 versions/master/doxygen/search/all_f.js            |    2 +-
 versions/master/doxygen/search/classes_7.js        |    3 +
 versions/master/doxygen/search/classes_d.js        |    4 +
 versions/master/doxygen/search/enumvalues_0.js     |    1 +
 versions/master/doxygen/search/files_9.js          |    1 +
 versions/master/doxygen/search/functions_1.js      |    1 +
 versions/master/doxygen/search/functions_11.js     |    3 +-
 versions/master/doxygen/search/functions_12.js     |    6 +-
 versions/master/doxygen/search/functions_13.js     |   14 +-
 versions/master/doxygen/search/functions_14.html   |   27 +
 .../search/{functions_13.js => functions_14.js}    |    1 +
 versions/master/doxygen/search/functions_4.js      |    1 +
 versions/master/doxygen/search/functions_6.js      |    1 +
 versions/master/doxygen/search/functions_7.js      |    1 +
 versions/master/doxygen/search/functions_8.js      |    1 +
 versions/master/doxygen/search/functions_9.js      |    3 +-
 versions/master/doxygen/search/functions_a.js      |    1 +
 versions/master/doxygen/search/functions_b.js      |    1 +
 versions/master/doxygen/search/functions_c.js      |    2 +-
 versions/master/doxygen/search/functions_e.js      |    3 +-
 versions/master/doxygen/search/functions_f.js      |    1 +
 versions/master/doxygen/search/namespaces_1.js     |    1 +
 versions/master/doxygen/search/search.js           |    2 +-
 versions/master/doxygen/search/typedefs_5.js       |    1 +
 versions/master/doxygen/search/variables_8.js      |    2 +
 versions/master/doxygen/static__array_8h.html      |    2 +-
 .../master/doxygen/static__array_8h_source.html    |    2 +-
 versions/master/doxygen/storage_8h.html            |    2 +-
 versions/master/doxygen/storage_8h_source.html     |    2 +-
 .../doxygen/structMXCallbackList-members.html      |    2 +-
 versions/master/doxygen/structMXCallbackList.html  |    2 +-
 .../doxygen/structNDArrayOpInfo-members.html       |    2 +-
 versions/master/doxygen/structNDArrayOpInfo.html   |    2 +-
 .../master/doxygen/structNativeOpInfo-members.html |    2 +-
 versions/master/doxygen/structNativeOpInfo.html    |    2 +-
 .../structmxnet_1_1CachedOpParam-members.html      |    2 +-
 .../doxygen/structmxnet_1_1CachedOpParam.html      |    2 +-
 .../doxygen/structmxnet_1_1Context-members.html    |    2 +-
 .../master/doxygen/structmxnet_1_1Context.html     |    2 +-
 .../doxygen/structmxnet_1_1DataBatch-members.html  |    2 +-
 .../master/doxygen/structmxnet_1_1DataBatch.html   |    2 +-
 .../doxygen/structmxnet_1_1DataInst-members.html   |    2 +-
 .../master/doxygen/structmxnet_1_1DataInst.html    |    2 +-
 .../doxygen/structmxnet_1_1DataIteratorReg.html    |    2 +-
 .../structmxnet_1_1NDArrayFunctionReg-members.html |    2 +-
 .../doxygen/structmxnet_1_1NDArrayFunctionReg.html |    2 +-
 .../doxygen/structmxnet_1_1OpContext-members.html  |    2 +-
 .../master/doxygen/structmxnet_1_1OpContext.html   |    2 +-
 ...structmxnet_1_1OperatorPropertyReg-members.html |    2 +-
 .../structmxnet_1_1OperatorPropertyReg.html        |    2 +-
 .../doxygen/structmxnet_1_1Resource-members.html   |   21 +-
 .../master/doxygen/structmxnet_1_1Resource.html    |   40 +-
 .../structmxnet_1_1ResourceRequest-members.html    |   13 +-
 .../doxygen/structmxnet_1_1ResourceRequest.html    |    8 +-
 .../structmxnet_1_1Resource__coll__graph.md5       |    2 +-
 .../structmxnet_1_1Resource__coll__graph.png       |  Bin 12279 -> 10201 bytes
 .../doxygen/structmxnet_1_1RunContext-members.html |    2 +-
 .../master/doxygen/structmxnet_1_1RunContext.html  |    2 +-
 .../structmxnet_1_1Storage_1_1Handle-members.html  |    2 +-
 .../doxygen/structmxnet_1_1Storage_1_1Handle.html  |    2 +-
 ...1_1common_1_1ObjectPoolAllocatable-members.html |    2 +-
 ...ctmxnet_1_1common_1_1ObjectPoolAllocatable.html |    2 +-
 ...ructmxnet_1_1common_1_1StaticArray-members.html |    2 +-
 .../structmxnet_1_1common_1_1StaticArray.html      |    2 +-
 ...mxnet_1_1common_1_1csr__idx__check-members.html |    2 +-
 .../structmxnet_1_1common_1_1csr__idx__check.html  |    2 +-
 ...et_1_1common_1_1csr__indptr__check-members.html |    2 +-
 ...tructmxnet_1_1common_1_1csr__indptr__check.html |    2 +-
 ...et_1_1common_1_1helper_1_1UniqueIf-members.html |    2 +-
 ...tructmxnet_1_1common_1_1helper_1_1UniqueIf.html |    2 +-
 ...on_1_1helper_1_1UniqueIf_3_01T[]_4-members.html |    2 +-
 ..._1_1common_1_1helper_1_1UniqueIf_3_01T[]_4.html |    2 +-
 ...1helper_1_1UniqueIf_3_01T[kSize]_4-members.html |    2 +-
 ...ommon_1_1helper_1_1UniqueIf_3_01T[kSize]_4.html |    2 +-
 ...mxnet_1_1common_1_1rsp__idx__check-members.html |    2 +-
 .../structmxnet_1_1common_1_1rsp__idx__check.html  |    2 +-
 .../structmxnet_1_1op_1_1EnvArguments-members.html |    2 +-
 .../doxygen/structmxnet_1_1op_1_1EnvArguments.html |    2 +-
 ...xnet_1_1op_1_1GradFunctionArgument-members.html |    2 +-
 .../structmxnet_1_1op_1_1GradFunctionArgument.html |    2 +-
 .../structmxnet_1_1op_1_1Input0-members.html       |    2 +-
 .../doxygen/structmxnet_1_1op_1_1Input0.html       |    2 +-
 .../structmxnet_1_1op_1_1Input1-members.html       |    2 +-
 .../doxygen/structmxnet_1_1op_1_1Input1.html       |    2 +-
 .../structmxnet_1_1op_1_1OutputGrad-members.html   |    2 +-
 .../doxygen/structmxnet_1_1op_1_1OutputGrad.html   |    2 +-
 .../structmxnet_1_1op_1_1OutputValue-members.html  |    2 +-
 .../doxygen/structmxnet_1_1op_1_1OutputValue.html  |    2 +-
 ...et_1_1rtc_1_1CudaModule_1_1ArgType-members.html |    2 +-
 ...tructmxnet_1_1rtc_1_1CudaModule_1_1ArgType.html |    2 +-
 versions/master/doxygen/tensor__blob_8h.html       |    2 +-
 .../master/doxygen/tensor__blob_8h_source.html     |    2 +-
 versions/master/doxygen/utils_8h.html              |   10 +-
 versions/master/doxygen/utils_8h__incl.map         |   16 +-
 versions/master/doxygen/utils_8h__incl.md5         |    2 +-
 versions/master/doxygen/utils_8h__incl.png         |  Bin 186507 -> 254756 bytes
 versions/master/doxygen/utils_8h_source.html       |  763 ++++++-------
 versions/master/faq/bucketing.html                 |    2 +-
 versions/master/faq/caffe.html                     |    2 +-
 versions/master/faq/cloud.html                     |    2 +-
 versions/master/faq/develop_and_hack.html          |    4 +-
 versions/master/faq/env_var.html                   |    2 +-
 versions/master/faq/faq.html                       |    2 +-
 versions/master/faq/finetune.html                  |    2 +-
 versions/master/faq/gradient_compression.html      |    2 +-
 versions/master/faq/index.html                     |    4 +-
 versions/master/faq/model_parallel_lstm.html       |    2 +-
 versions/master/faq/multi_devices.html             |    2 +-
 versions/master/faq/new_op.html                    |    2 +-
 versions/master/faq/nnpack.html                    |    2 +-
 versions/master/faq/perf.html                      |    2 +-
 versions/master/faq/recordio.html                  |    2 +-
 versions/master/faq/s3_integration.html            |    2 +-
 versions/master/faq/smart_device.html              |    2 +-
 versions/master/faq/torch.html                     |   12 +-
 versions/master/faq/visualize_graph.html           |    2 +-
 versions/master/genindex.html                      |   50 +-
 versions/master/get_started/index.html             |    2 +-
 versions/master/get_started/why_mxnet.html         |    2 +-
 versions/master/gluon/index.html                   |    2 +-
 versions/master/how_to/add_op_in_backend.html      |    2 +-
 versions/master/how_to/security.html               |    2 +-
 versions/master/index.html                         |   24 +-
 versions/master/install/amazonlinux_setup.html     |    2 +-
 versions/master/install/build_from_source.html     |   50 +-
 versions/master/install/centos_setup.html          |    2 +-
 versions/master/install/index.html                 |   55 +-
 versions/master/install/osx_setup.html             |    2 +-
 versions/master/install/raspbian_setup.html        |    2 +-
 versions/master/install/tx2_setup.html             |    2 +-
 versions/master/install/ubuntu_setup.html          |    2 +-
 versions/master/install/windows_setup.html         |   24 +-
 versions/master/model_zoo/index.html               |    2 +-
 versions/master/objects.inv                        |  Bin 10301 -> 10407 bytes
 versions/master/py-modindex.html                   |    2 +-
 versions/master/search.html                        |    2 +-
 versions/master/searchindex.js                     |    2 +-
 versions/master/tutorials/basic/data.html          |    3 +-
 versions/master/tutorials/basic/image_io.html      |   22 +-
 versions/master/tutorials/basic/image_io.ipynb     |    2 +-
 versions/master/tutorials/basic/module.html        |   69 +-
 versions/master/tutorials/basic/module.ipynb       |    2 +-
 versions/master/tutorials/basic/ndarray.html       |    3 +-
 .../master/tutorials/basic/ndarray_indexing.html   |    3 +-
 versions/master/tutorials/basic/record_io.html     |    2 +-
 versions/master/tutorials/basic/symbol.html        |    3 +-
 versions/master/tutorials/c++/basics.html          |   19 +-
 .../master/tutorials/embedded/wine_detector.html   |    2 +-
 .../general_ml/recommendation_systems.html         |   12 +-
 versions/master/tutorials/gluon/autograd.html      |    2 +-
 versions/master/tutorials/gluon/customop.html      |    2 +-
 versions/master/tutorials/gluon/gluon.html         |    2 +-
 versions/master/tutorials/gluon/hybrid.html        |    2 +-
 versions/master/tutorials/gluon/mnist.html         |    4 +-
 versions/master/tutorials/gluon/mnist.ipynb        |    2 +-
 versions/master/tutorials/gluon/ndarray.html       |    2 +-
 versions/master/tutorials/index.html               |   17 +-
 versions/master/tutorials/nlp/cnn.html             |  164 +--
 versions/master/tutorials/nlp/nce_loss.html        |   12 +-
 versions/master/tutorials/nlp/rnn.html             |   12 +-
 versions/master/tutorials/python/kvstore.html      |    2 +-
 .../master/tutorials/python/linear-regression.html |    6 +-
 .../tutorials/python/linear-regression.ipynb       |    2 +-
 .../tutorials/python/matrix_factorization.html     |    2 +-
 versions/master/tutorials/python/mnist.html        |    5 +-
 versions/master/tutorials/python/mnist.ipynb       |    2 +-
 .../master/tutorials/python/predict_image.html     |    3 +-
 versions/master/tutorials/r/CallbackFunction.html  |    2 +-
 versions/master/tutorials/r/CustomIterator.html    |    2 +-
 .../master/tutorials/r/CustomLossFunction.html     |    2 +-
 versions/master/tutorials/r/charRnnModel.html      |    2 +-
 .../r/classifyRealImageWithPretrainedModel.html    |    2 +-
 .../tutorials/r/fiveMinutesNeuralNetwork.html      |    2 +-
 versions/master/tutorials/r/index.html             |    2 +-
 versions/master/tutorials/r/mnistCompetition.html  |    2 +-
 versions/master/tutorials/r/ndarray.html           |    2 +-
 versions/master/tutorials/r/symbol.html            |    2 +-
 versions/master/tutorials/scala/char_lstm.html     |    2 +-
 versions/master/tutorials/scala/mnist.html         |    2 +-
 .../tutorials/scala/mxnet_scala_on_intellij.html   |    2 +-
 versions/master/tutorials/sparse/csr.html          |  118 +-
 versions/master/tutorials/sparse/csr.ipynb         |    2 +-
 versions/master/tutorials/sparse/row_sparse.html   |  115 +-
 versions/master/tutorials/sparse/row_sparse.ipynb  |    2 +-
 versions/master/tutorials/sparse/train.html        |   52 +-
 versions/master/tutorials/sparse/train.ipynb       |    2 +-
 .../speech_recognition/baidu_warp_ctc.html         |    2 +-
 .../tutorials/speech_recognition/speech_lstm.html  |    2 +-
 .../unsupervised_learning/auto_encoders.html       |    2 +-
 .../tutorials/unsupervised_learning/gan.html       |  191 ++--
 .../vision/large_scale_classification.html         |    3 +-
 740 files changed, 7698 insertions(+), 3388 deletions(-)

diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 468be29..193f5b0 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -5,9 +5,15 @@
 ### Essentials ###
 - [ ] Passed code style checking (`make lint`)
 - [ ] Changes are complete (i.e. I finished coding on this PR)
-- [ ] All changes have test coverage
-- [ ] For user-facing API changes, API doc string has been updated. For new C++ functions in header files, their functionalities and arguments are well-documented. 
-- [ ] To my best knowledge, examples are either not affected by this change, or have been fixed to be compatible with this change
+- [ ] All changes have test coverage:
+- Unit tests are added for small changes to verify correctness (e.g. adding a new operator)
+- Nightly tests are added for complicated/long-running ones (e.g. changing distributed kvstore)
+- Build tests will be added for build configuration changes (e.g. adding a new build option with NCCL)
+- [ ] Code is well-documented: 
+- For user-facing API changes, API doc string has been updated. 
+- For new C++ functions in header files, their functionalities and arguments are documented. 
+- For new examples, README.md is added to explain what the example does, the source of the dataset, the expected performance on the test set, and a reference to the original paper if applicable
+- [ ] To my best knowledge, examples are either not affected by this change, or have been fixed to be compatible with this change
 
 ### Changes ###
 - [ ] Feature1, tests, (and when applicable, API doc)
diff --git a/versions/master/README.html b/versions/master/README.html
index 68b986d..fedea6a 100644
--- a/versions/master/README.html
+++ b/versions/master/README.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/index.html b/versions/master/_modules/index.html
index 992e171..3c7a689 100644
--- a/versions/master/_modules/index.html
+++ b/versions/master/_modules/index.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -243,138 +243,146 @@
 <li><a href="mxnet/module/module.html">mxnet.module.module</a></li>
 <li><a href="mxnet/module/python_module.html">mxnet.module.python_module</a></li>
 <li><a href="mxnet/module/sequential_module.html">mxnet.module.sequential_module</a></li>
-<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
-<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
-<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
-<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
[truncated diff hunk: removed (-) and re-added (+) auto-generated <li><a href=...> index entries whose hrefs repeat the path segments mxnet/, ndarray/contrib/, symbol/contrib/, linalg/, and sparse/ many times over before ending in contrib.html, linalg.html, or sparse.html, with link text that repeats "mxnet." a matching number of times; every one of these lines is cut off with "[...]" in the original mail.]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbo [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/cont [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linal [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/lina [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linal [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/lin [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/lina [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/li [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/lin [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/l [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/li [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/l [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linal [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/lina [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linal [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/lin [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/lina [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/li [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/lin [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/s [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sp [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/s [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/ [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/spars [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse/spar [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse/spars [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse/sparse/spa [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse/sparse/spar [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse/sparse.html">mxn [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse/sparse.html">mxne [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse.html">mxnet.mxnet.mxne [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse/sparse.html">mxnet.mxnet.mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse.html">mxnet.mxnet.mxnet.mxnet.mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse/sparse.html">mxnet.mxnet.mxnet.mxnet.mxnet. [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet. [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/sparse.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.m [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.m [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mx [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mx [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxn [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxn [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxne [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxne [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet. [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet. [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.m [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.m [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mx [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray. [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.co [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contri [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/linalg.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contrib.contrib.contrib. [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contrib.contrib.contrib.c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contrib.contrib.contrib.contrib.contrib.con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.c [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.con [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contr [...]
+<li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib [...]
 <li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib</a></li>
 <li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/symbol/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.symbol.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib</a></li>
 <li><a href="mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/mxnet/ndarray/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib/contrib.html">mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.mxnet.ndarray.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib.contrib</a></li>
diff --git a/versions/master/_modules/mxnet/autograd.html b/versions/master/_modules/mxnet/autograd.html
index beecb93..d63105b 100644
--- a/versions/master/_modules/mxnet/autograd.html
+++ b/versions/master/_modules/mxnet/autograd.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -578,7 +578,7 @@
 
 <span class="sd">    For example, a stable sigmoid function can be defined as::</span>
 
-<span class="sd">        class sigmoid(Function):</span>
+<span class="sd">        class sigmoid(mx.autograd.Function):</span>
 <span class="sd">            def forward(self, x):</span>
 <span class="sd">                y = 1 / (1 + mx.nd.exp(-x))</span>
 <span class="sd">                self.save_for_backward(y)</span>
@@ -589,6 +589,18 @@
 <span class="sd">                # and returns as many NDArrays as forward's arguments.</span>
 <span class="sd">                y, = self.saved_tensors</span>
 <span class="sd">                return y * (1-y)</span>
+
+<span class="sd">    Then, the function can be used in the following way::</span>
+
+<span class="sd">        func = sigmoid()</span>
+<span class="sd">        x = mx.nd.random.uniform(shape=(10,))</span>
+<span class="sd">        x.attach_grad()</span>
+
+<span class="sd">        with mx.autograd.record():</span>
+<span class="sd">            m = func(x)</span>
+<span class="sd">            m.backward()</span>
+<span class="sd">        dx = x.grad.asnumpy()</span>
+
 <span class="sd">    """</span>
     <span class="n">_bwd_functype</span> <span class="o">=</span> <span class="n">CFUNCTYPE</span><span class="p">(</span><span class="n">c_int</span><span class="p">,</span> <span class="n">c_int</span><span class="p">,</span> <span class="n">c_int</span><span class="p">,</span> <span class="n">POINTER</span><span class="p">(</span><span class="n">c_void_p</span><span class="p">),</span>
                               <span class="n">POINTER</span><span class="p">(</span><span class="n">c_int</span><span class="p">),</span> <span class="n">c_int</span><span class="p">,</span> <span class="n">c_void_p</span><span class="p">)</span>
diff --git a/versions/master/_modules/mxnet/callback.html b/versions/master/_modules/mxnet/callback.html
index b8982c7..badd319 100644
--- a/versions/master/_modules/mxnet/callback.html
+++ b/versions/master/_modules/mxnet/callback.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/executor.html b/versions/master/_modules/mxnet/executor.html
index 0a0d25d..540e880 100644
--- a/versions/master/_modules/mxnet/executor.html
+++ b/versions/master/_modules/mxnet/executor.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/executor_manager.html b/versions/master/_modules/mxnet/executor_manager.html
index 27d241b..4d955cf 100644
--- a/versions/master/_modules/mxnet/executor_manager.html
+++ b/versions/master/_modules/mxnet/executor_manager.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/block.html b/versions/master/_modules/mxnet/gluon/block.html
index 42b2b84..f9e8327 100644
--- a/versions/master/_modules/mxnet/gluon/block.html
+++ b/versions/master/_modules/mxnet/gluon/block.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/contrib/rnn/conv_rnn_cell.html b/versions/master/_modules/mxnet/gluon/contrib/rnn/conv_rnn_cell.html
index 837c7bf..af2b5d5 100644
--- a/versions/master/_modules/mxnet/gluon/contrib/rnn/conv_rnn_cell.html
+++ b/versions/master/_modules/mxnet/gluon/contrib/rnn/conv_rnn_cell.html
@@ -51,7 +51,7 @@
 <link href="../../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/contrib/rnn/rnn_cell.html b/versions/master/_modules/mxnet/gluon/contrib/rnn/rnn_cell.html
index 3b7f1bd..d02d5dc 100644
--- a/versions/master/_modules/mxnet/gluon/contrib/rnn/rnn_cell.html
+++ b/versions/master/_modules/mxnet/gluon/contrib/rnn/rnn_cell.html
@@ -51,7 +51,7 @@
 <link href="../../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/data/dataloader.html b/versions/master/_modules/mxnet/gluon/data/dataloader.html
index 9e7c808..a7233d2 100644
--- a/versions/master/_modules/mxnet/gluon/data/dataloader.html
+++ b/versions/master/_modules/mxnet/gluon/data/dataloader.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -231,9 +231,7 @@
 <span class="kn">from</span> <span class="nn">multiprocessing.reduction</span> <span class="kn">import</span> <span class="n">ForkingPickler</span>
 <span class="kn">import</span> <span class="nn">pickle</span>
 <span class="kn">import</span> <span class="nn">io</span>
-<span class="kn">import</span> <span class="nn">os</span>
 <span class="kn">import</span> <span class="nn">sys</span>
-<span class="kn">import</span> <span class="nn">warnings</span>
 <span class="kn">import</span> <span class="nn">numpy</span> <span class="kn">as</span> <span class="nn">np</span>
 
 <span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">sampler</span> <span class="k">as</span> <span class="n">_sampler</span>
@@ -258,7 +256,7 @@
 <span class="sd">    NDArray via shared memory."""</span>
 
     <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">conn</span><span class="p">):</span>
-        <span class="bp">self</span><span class="o">.</span><span class="n">conn</span> <span class="o">=</span> <span class="n">conn</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">_conn</span> <span class="o">=</span> <span class="n">conn</span>
 
     <span class="k">def</span> <span class="nf">send</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">obj</span><span class="p">):</span>
         <span class="sd">"""Send object"""</span>
@@ -273,7 +271,8 @@
 
     <span class="k">def</span> <span class="fm">__getattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
         <span class="sd">"""Emmulate conn"""</span>
-        <span class="k">return</span> <span class="nb">getattr</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">conn</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>
+        <span class="n">attr</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'_conn'</span><span class="p">,</span> <span class="bp">None</span><span class="p">)</span>
+        <span class="k">return</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>
 
 
 <span class="k">class</span> <span class="nc">Queue</span><span class="p">(</span><span class="n">multiprocessing</span><span class="o">.</span><span class="n">queues</span><span class="o">.</span><span class="n">Queue</span><span class="p">):</span>
@@ -394,9 +393,6 @@
                              <span class="s2">"not be specified if batch_sampler is specified."</span><span class="p">)</span>
 
         <span class="bp">self</span><span class="o">.</span><span class="n">_batch_sampler</span> <span class="o">=</span> <span class="n">batch_sampler</span>
-        <span class="k">if</span> <span class="n">num_workers</span> <span class="o">></span> <span class="mi">0</span> <span class="ow">and</span> <span class="n">os</span><span class="o">.</span><span class="n">name</span> <span class="o">==</span> <span class="s1">'nt'</span><span class="p">:</span>
-            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s2">"DataLoader does not support num_workers > 0 on Windows yet."</span><span class="p">)</span>
-            <span class="n">num_workers</span> <span class="o">=</span> <span class="mi">0</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">_num_workers</span> <span class="o">=</span> <span class="n">num_workers</span>
         <span class="k">if</span> <span class="n">batchify_fn</span> <span class="ow">is</span> <span class="bp">None</span><span class="p">:</span>
             <span class="k">if</span> <span class="n">num_workers</span> <span class="o">></span> <span class="mi">0</span><span class="p">:</span>
@@ -426,10 +422,11 @@
 
         <span class="k">for</span> <span class="n">idx</span><span class="p">,</span> <span class="n">batch</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_batch_sampler</span><span class="p">):</span>
             <span class="n">key_queue</span><span class="o">.</span><span class="n">put</span><span class="p">((</span><span class="n">idx</span><span class="p">,</span> <span class="n">batch</span><span class="p">))</span>
+        <span class="n">num_batches</span> <span class="o">=</span> <span class="n">idx</span> <span class="o">+</span> <span class="mi">1</span>
 
         <span class="n">data_buffer</span> <span class="o">=</span> <span class="p">{}</span>
         <span class="n">curr_idx</span> <span class="o">=</span> <span class="mi">0</span>
-        <span class="k">for</span> <span class="n">_</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_batch_sampler</span><span class="p">)):</span>
+        <span class="k">for</span> <span class="n">_</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_batches</span><span class="p">):</span>
             <span class="n">idx</span><span class="p">,</span> <span class="n">batch</span> <span class="o">=</span> <span class="n">data_queue</span><span class="o">.</span><span class="n">get</span><span class="p">()</span>
             <span class="n">data_buffer</span><span class="p">[</span><span class="n">idx</span><span class="p">]</span> <span class="o">=</span> <span class="n">batch</span>
             <span class="k">while</span> <span class="n">curr_idx</span> <span class="ow">in</span> <span class="n">data_buffer</span><span class="p">:</span>
diff --git a/versions/master/_modules/mxnet/gluon/data/dataset.html b/versions/master/_modules/mxnet/gluon/data/dataset.html
index 246a0a7..0043671 100644
--- a/versions/master/_modules/mxnet/gluon/data/dataset.html
+++ b/versions/master/_modules/mxnet/gluon/data/dataset.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -261,8 +261,8 @@
         <span class="bp">self</span><span class="o">.</span><span class="n">_data</span> <span class="o">=</span> <span class="p">[]</span>
         <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">data</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">args</span><span class="p">):</span>
             <span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">data</span><span class="p">)</span> <span class="o">==</span> <span class="bp">self</span><span class="o">.</span><span class="n">_length</span><span class="p">,</span> \
-                <span class="s2">"All arrays must have the same length. But the first has </span><span class="si">%s</span><span class="s2"> "</span> \
-                <span class="s2">"while the </span><span class="si">%d</span><span class="s2">-th has </span><span class="si">%d</span><span class="s2">."</span><span class="o">%</span><span class="p">(</span><span class="n">length</span><span class="p">,</span> <span class="n">i</span><span class="o">+</span><span class="mi">1</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">data</span><span class="p">))</span>
+                <span class="s2">"All arrays must have the same length; array[0] has length </span><span class="si">%d</span><span class="s2"> "</span> \
+                <span class="s2">"while array[</span><span class="si">%d</span><span class="s2">] has </span><span class="si">%d</span><span class="s2">."</span> <span class="o">%</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_length</span><span class="p">,</span> <span class="n">i</span><span class="o">+</span><span class="mi">1</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">data</spa [...]
             <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">NDArray</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">data</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span>< [...]
                 <span class="n">data</span> <span class="o">=</span> <span class="n">data</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
             <span class="bp">self</span><span class="o">.</span><span class="n">_data</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">data</span><span class="p">)</span>
diff --git a/versions/master/_modules/mxnet/gluon/data/sampler.html b/versions/master/_modules/mxnet/gluon/data/sampler.html
index aeff187..41220c4 100644
--- a/versions/master/_modules/mxnet/gluon/data/sampler.html
+++ b/versions/master/_modules/mxnet/gluon/data/sampler.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/data/vision.html b/versions/master/_modules/mxnet/gluon/data/vision.html
index cb429b1..b85b0b0 100644
--- a/versions/master/_modules/mxnet/gluon/data/vision.html
+++ b/versions/master/_modules/mxnet/gluon/data/vision.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/loss.html b/versions/master/_modules/mxnet/gluon/loss.html
index c6e85db..ab88da7 100644
--- a/versions/master/_modules/mxnet/gluon/loss.html
+++ b/versions/master/_modules/mxnet/gluon/loss.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision.html
index fbc7dc2..a06ebf8 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -245,8 +245,8 @@
 <span class="sd">    squeezenet = vision.squeezenet1_0()</span>
 <span class="sd">    densenet = vision.densenet_161()</span>
 
-<span class="sd">We provide pre-trained models for all the models except ResNet V2.</span>
-<span class="sd">These can constructed by passing ``pretrained=True``:</span>
+<span class="sd">We provide pre-trained models for all the listed models.</span>
+<span class="sd">These models can constructed by passing ``pretrained=True``:</span>
 
 <span class="sd">.. code::</span>
 
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/alexnet.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/alexnet.html
index 6e78f7b..076d448 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/alexnet.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/alexnet.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/densenet.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/densenet.html
index 74e0eb6..a827c29 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/densenet.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/densenet.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/inception.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/inception.html
index 581f350..85d1015 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/inception.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/inception.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/mobilenet.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/mobilenet.html
index 76b47be..d276529 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/mobilenet.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/mobilenet.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -234,7 +234,7 @@
 <span class="c1"># Helpers</span>
 <span class="k">def</span> <span class="nf">_add_conv</span><span class="p">(</span><span class="n">out</span><span class="p">,</span> <span class="n">channels</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">kernel</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">stride</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">pad</span><span class="o">=</s [...]
     <span class="n">out</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Conv2D</span><span class="p">(</span><span class="n">channels</span><span class="p">,</span> <span class="n">kernel</span><span class="p">,</span> <span class="n">stride</span><span class="p">,</span> <span class="n">pad</span><span class="p">,</span> <span class="n">groups</span><span class="o">=</span><span class="n">nu [...]
-    <span class="n">out</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">BatchNorm</span><span class="p">(</span><span class="n">scale</span><span class="o">=</span><span class="bp">False</span><span class="p">))</span>
+    <span class="n">out</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">BatchNorm</span><span class="p">(</span><span class="n">scale</span><span class="o">=</span><span class="bp">True</span><span class="p">))</span>
     <span class="n">out</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Activation</span><span class="p">(</span><span class="s1">'relu'</span><span class="p">))</span>
 
 <span class="k">def</span> <span class="nf">_add_conv_dw</span><span class="p">(</span><span class="n">out</span><span class="p">,</span> <span class="n">dw_channels</span><span class="p">,</span> <span class="n">channels</span><span class="p">,</span> <span class="n">stride</span><span class="p">):</span>
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/resnet.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/resnet.html
index f956500..8df5328 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/resnet.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/resnet.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -308,10 +308,10 @@
     <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">channels</span><span class="p">,</span> <span class="n">stride</span><span class="p">,</span> <span class="n">downsample</span><span class="o">=</span><span class="bp">False</span><span class="p">,</span> <span class="n">in_channels</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="o">**</spa [...]
         <span class="nb">super</span><span class="p">(</span><span class="n">BottleneckV1</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">body</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">HybridSequential</span><span class="p">(</span><span class="n">prefix</span><span class="o">=</span><span class="s1">''</span><span class="p">)</span>
-        <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Conv2D</span><span class="p">(</span><span class="n">channels</span><span class="o">//</span><span class="mi">4</span><span class="p">,</span> <span class="n">kernel_size</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n" [...]
+        <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Conv2D</span><span class="p">(</span><span class="n">channels</span><span class="o">//</span><span class="mi">4</span><span class="p">,</span> <span class="n">kernel_size</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n" [...]
         <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">BatchNorm</span><span class="p">())</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Activation</span><span class="p">(</span><span class="s1">'relu'</span><span class="p">))</span>
-        <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">_conv3x3</span><span class="p">(</span><span class="n">channels</span><span class="o">//</span><span class="mi">4</span><span class="p">,</span> <span class="n">stride</span><span class="p">,</span> <span class="n">channels</span><span class="o">//</span><span class="mi">4</span><span class="p">))</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">_conv3x3</span><span class="p">(</span><span class="n">channels</span><span class="o">//</span><span class="mi">4</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="n">channels</span><span class="o">//</span><span class="mi">4</span><span class="p">))</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">BatchNorm</span><span class="p">())</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Activation</span><span class="p">(</span><span class="s1">'relu'</span><span class="p">))</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">body</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Conv2D</span><span class="p">(</span><span class="n">channels</span><span class="p">,</span> <span class="n">kernel_size</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">strides</span><span class="o">=</span><span class [...]
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/squeezenet.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/squeezenet.html
index 1369e7a..cc9e1e9 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/squeezenet.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/squeezenet.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/model_zoo/vision/vgg.html b/versions/master/_modules/mxnet/gluon/model_zoo/vision/vgg.html
index 1542b74..e57aa11 100644
--- a/versions/master/_modules/mxnet/gluon/model_zoo/vision/vgg.html
+++ b/versions/master/_modules/mxnet/gluon/model_zoo/vision/vgg.html
@@ -51,7 +51,7 @@
 <link href="../vision.html" rel="up" title="mxnet.gluon.model_zoo.vision">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/nn/basic_layers.html b/versions/master/_modules/mxnet/gluon/nn/basic_layers.html
index f336f5d..ed00bd7 100644
--- a/versions/master/_modules/mxnet/gluon/nn/basic_layers.html
+++ b/versions/master/_modules/mxnet/gluon/nn/basic_layers.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/nn/conv_layers.html b/versions/master/_modules/mxnet/gluon/nn/conv_layers.html
index 6dcd693..ff23310 100644
--- a/versions/master/_modules/mxnet/gluon/nn/conv_layers.html
+++ b/versions/master/_modules/mxnet/gluon/nn/conv_layers.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -1178,14 +1178,15 @@
 <div class="viewcode-block" id="GlobalMaxPool2D"><a class="viewcode-back" href="../../../../api/python/gluon/nn.html#mxnet.gluon.nn.GlobalMaxPool2D">[docs]</a><span class="k">class</span> <span class="nc">GlobalMaxPool2D</span><span class="p">(</span><span class="n">_Pooling</span><span class="p">):</span>
     <span class="sd">"""Global max pooling operation for spatial data."""</span>
     <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">layout</span><span class="o">=</span><span class="s1">'NCHW'</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
-        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCHW'</span><span class="p">,</span> <span class="s2">"Only supports NCW layout for now"</span>
+        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCHW'</span><span class="p">,</span> <span class="s2">"Only supports NCHW layout for now"</span>
         <span class="nb">super</span><span class="p">(</span><span class="n">GlobalMaxPool2D</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
             <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="bp">None</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="s1">'max'</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
 
+
 <div class="viewcode-block" id="GlobalMaxPool3D"><a class="viewcode-back" href="../../../../api/python/gluon/nn.html#mxnet.gluon.nn.GlobalMaxPool3D">[docs]</a><span class="k">class</span> <span class="nc">GlobalMaxPool3D</span><span class="p">(</span><span class="n">_Pooling</span><span class="p">):</span>
     <span class="sd">"""Global max pooling operation for 3D data."""</span>
     <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">layout</span><span class="o">=</span><span class="s1">'NCDHW'</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
-        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCDHW'</span><span class="p">,</span> <span class="s2">"Only supports NCW layout for now"</span>
+        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCDHW'</span><span class="p">,</span> <span class="s2">"Only supports NCDHW layout for now"</span>
         <span class="nb">super</span><span class="p">(</span><span class="n">GlobalMaxPool3D</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
             <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="bp">None</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="s1">'max'</span><span class="p">,</span> <span class="o">**</span><span class="n">k [...]
 
@@ -1201,7 +1202,7 @@
 <div class="viewcode-block" id="GlobalAvgPool2D"><a class="viewcode-back" href="../../../../api/python/gluon/nn.html#mxnet.gluon.nn.GlobalAvgPool2D">[docs]</a><span class="k">class</span> <span class="nc">GlobalAvgPool2D</span><span class="p">(</span><span class="n">_Pooling</span><span class="p">):</span>
     <span class="sd">"""Global average pooling operation for spatial data."""</span>
     <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">layout</span><span class="o">=</span><span class="s1">'NCHW'</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
-        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCHW'</span><span class="p">,</span> <span class="s2">"Only supports NCW layout for now"</span>
+        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCHW'</span><span class="p">,</span> <span class="s2">"Only supports NCHW layout for now"</span>
         <span class="nb">super</span><span class="p">(</span><span class="n">GlobalAvgPool2D</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
             <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="bp">None</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="s1">'avg'</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
 
@@ -1209,7 +1210,7 @@
 <div class="viewcode-block" id="GlobalAvgPool3D"><a class="viewcode-back" href="../../../../api/python/gluon/nn.html#mxnet.gluon.nn.GlobalAvgPool3D">[docs]</a><span class="k">class</span> <span class="nc">GlobalAvgPool3D</span><span class="p">(</span><span class="n">_Pooling</span><span class="p">):</span>
     <span class="sd">"""Global max pooling operation for 3D data."""</span>
     <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">layout</span><span class="o">=</span><span class="s1">'NCDHW'</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
-        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCDHW'</span><span class="p">,</span> <span class="s2">"Only supports NCW layout for now"</span>
+        <span class="k">assert</span> <span class="n">layout</span> <span class="o">==</span> <span class="s1">'NCDHW'</span><span class="p">,</span> <span class="s2">"Only supports NCDHW layout for now"</span>
         <span class="nb">super</span><span class="p">(</span><span class="n">GlobalAvgPool3D</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
             <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="bp">None</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="s1">'avg'</span><span class="p">,</span> <span class="o">**</span><span class="n">k [...]
 </pre></div>
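The four pooling hunks above only correct the assertion messages so that each global pooling block names the layout it actually requires (NCHW for the 2D variants, NCDHW for the 3D ones); the accepted layouts themselves do not change. A minimal sketch of the behaviour, assuming a Gluon build where these blocks are available under mxnet.gluon.nn:

    import mxnet as mx
    from mxnet.gluon import nn

    pool = nn.GlobalMaxPool2D()                    # only the default layout='NCHW' is accepted
    x = mx.nd.random.uniform(shape=(1, 3, 8, 8))   # batch, channel, height, width
    print(pool(x).shape)                           # (1, 3, 1, 1): one maximum per channel

    try:
        nn.GlobalMaxPool2D(layout='NHWC')          # rejected by the assert shown in the diff
    except AssertionError as err:
        print(err)                                 # now reads "Only supports NCHW layout for now"
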
diff --git a/versions/master/_modules/mxnet/gluon/parameter.html b/versions/master/_modules/mxnet/gluon/parameter.html
index 599abda..aa516e3 100644
--- a/versions/master/_modules/mxnet/gluon/parameter.html
+++ b/versions/master/_modules/mxnet/gluon/parameter.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -323,7 +323,7 @@
 
     <span class="k">def</span> <span class="fm">__repr__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
         <span class="n">s</span> <span class="o">=</span> <span class="s1">'Parameter {name} (shape={shape}, dtype={dtype})'</span>
-        <span class="k">return</span> <span class="n">s</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">)</span>
+        <span class="k">return</span> <span class="n">s</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">shape</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">dtype</span><span class=" [...]
 
     <span class="nd">@property</span>
     <span class="k">def</span> <span class="nf">grad_req</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
diff --git a/versions/master/_modules/mxnet/gluon/rnn/rnn_cell.html b/versions/master/_modules/mxnet/gluon/rnn/rnn_cell.html
index e15ff8d..2cba764 100644
--- a/versions/master/_modules/mxnet/gluon/rnn/rnn_cell.html
+++ b/versions/master/_modules/mxnet/gluon/rnn/rnn_cell.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/rnn/rnn_layer.html b/versions/master/_modules/mxnet/gluon/rnn/rnn_layer.html
index d33789d..497b61d 100644
--- a/versions/master/_modules/mxnet/gluon/rnn/rnn_layer.html
+++ b/versions/master/_modules/mxnet/gluon/rnn/rnn_layer.html
@@ -51,7 +51,7 @@
 <link href="../../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/gluon/trainer.html b/versions/master/_modules/mxnet/gluon/trainer.html
index e568533..a105944 100644
--- a/versions/master/_modules/mxnet/gluon/trainer.html
+++ b/versions/master/_modules/mxnet/gluon/trainer.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -258,7 +258,7 @@
 
 <span class="sd">    Properties</span>
 <span class="sd">    ----------</span>
-<span class="sd">    learning_rate: float</span>
+<span class="sd">    learning_rate : float</span>
 <span class="sd">        The current learning rate of the optimizer. Given an Optimizer object</span>
 <span class="sd">        optimizer, its learning rate can be accessed as optimizer.learning_rate.</span>
 <span class="sd">    """</span>
@@ -429,6 +429,9 @@
 <span class="sd">        fname : str</span>
 <span class="sd">            Path to input states file.</span>
 <span class="sd">        """</span>
+        <span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">_kv_initialized</span><span class="p">:</span>
+            <span class="bp">self</span><span class="o">.</span><span class="n">_init_kvstore</span><span class="p">()</span>
+
         <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_update_on_kvstore</span><span class="p">:</span>
             <span class="bp">self</span><span class="o">.</span><span class="n">_kvstore</span><span class="o">.</span><span class="n">load_optimizer_states</span><span class="p">(</span><span class="n">fname</span><span class="p">)</span>
             <span class="bp">self</span><span class="o">.</span><span class="n">_optimizer</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_kvstore</span><span class="o">.</span><span class="n">_updater</span><span class="o">.</span><span class="n">optimizer</span>
diff --git a/versions/master/_modules/mxnet/gluon/utils.html b/versions/master/_modules/mxnet/gluon/utils.html
index 3e3d6b0..280e264 100644
--- a/versions/master/_modules/mxnet/gluon/utils.html
+++ b/versions/master/_modules/mxnet/gluon/utils.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/image/detection.html b/versions/master/_modules/mxnet/image/detection.html
index 9ba111d..711bd3d 100644
--- a/versions/master/_modules/mxnet/image/detection.html
+++ b/versions/master/_modules/mxnet/image/detection.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -226,9 +226,11 @@
 
 <span class="kn">from</span> <span class="nn">__future__</span> <span class="kn">import</span> <span class="n">absolute_import</span><span class="p">,</span> <span class="n">print_function</span>
 
-<span class="kn">import</span> <span class="nn">random</span>
-<span class="kn">import</span> <span class="nn">logging</span>
 <span class="kn">import</span> <span class="nn">json</span>
+<span class="kn">import</span> <span class="nn">logging</span>
+<span class="kn">import</span> <span class="nn">random</span>
+<span class="kn">import</span> <span class="nn">warnings</span>
+
 <span class="kn">import</span> <span class="nn">numpy</span> <span class="kn">as</span> <span class="nn">np</span>
 
 <span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">numeric_types</span>
@@ -399,10 +401,10 @@
         <span class="bp">self</span><span class="o">.</span><span class="n">area_range</span> <span class="o">=</span> <span class="n">area_range</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">enabled</span> <span class="o">=</span> <span class="bp">False</span>
         <span class="k">if</span> <span class="p">(</span><span class="n">area_range</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="o"><=</span> <span class="mi">0</span> <span class="ow">or</span> <span class="n">area_range</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">></span> <span class="n">area_range</span><span class="p">[</span><span class="mi">1</span><span class="p">]):</span>
-            <span class="n">logging</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomCropAug due to invalid area_range: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">area_range</span><span class="p">)</span>
+            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomCropAug due to invalid area_range: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">area_range</span><span class="p">)</span>
         <span class="k">elif</span> <span class="p">(</span><span class="n">aspect_ratio_range</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">></span> <span class="n">aspect_ratio_range</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="ow">or</span> <span class="n">aspect_ratio_range</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o"><=</span> <span cla [...]
-            <span class="n">logging</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomCropAug due to invalid aspect_ratio_range: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span>
-                         <span class="n">aspect_ratio_range</span><span class="p">)</span>
+            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomCropAug due to invalid aspect_ratio_range: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span>
+                          <span class="n">aspect_ratio_range</span><span class="p">)</span>
         <span class="k">else</span><span class="p">:</span>
             <span class="bp">self</span><span class="o">.</span><span class="n">enabled</span> <span class="o">=</span> <span class="bp">True</span>
 
@@ -565,10 +567,10 @@
         <span class="bp">self</span><span class="o">.</span><span class="n">max_attempts</span> <span class="o">=</span> <span class="n">max_attempts</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">enabled</span> <span class="o">=</span> <span class="bp">False</span>
         <span class="k">if</span> <span class="p">(</span><span class="n">area_range</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="o"><=</span> <span class="mf">1.0</span> <span class="ow">or</span> <span class="n">area_range</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">></span> <span class="n">area_range</span><span class="p">[</span><span class="mi">1</span><span class="p">]):</span>
-            <span class="n">logging</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomPadAug due to invalid parameters: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">area_range</span><span class="p">)</span>
+            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomPadAug due to invalid parameters: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">area_range</span><span class="p">)</span>
         <span class="k">elif</span> <span class="p">(</span><span class="n">aspect_ratio_range</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o"><=</span> <span class="mi">0</span> <span class="ow">or</span> <span class="n">aspect_ratio_range</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">></span> <span class="n">aspect_ratio_range</span><span class="p">[</span><span class="mi">1</span><span cl [...]
-            <span class="n">logging</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomPadAug due to invalid aspect_ratio_range: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span>
-                         <span class="n">aspect_ratio_range</span><span class="p">)</span>
+            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Skip DetRandomPadAug due to invalid aspect_ratio_range: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span>
+                          <span class="n">aspect_ratio_range</span><span class="p">)</span>
         <span class="k">else</span><span class="p">:</span>
             <span class="bp">self</span><span class="o">.</span><span class="n">enabled</span> <span class="o">=</span> <span class="bp">True</span>
 
@@ -1046,7 +1048,7 @@
         <span class="k">try</span><span class="p">:</span>
             <span class="kn">import</span> <span class="nn">cv2</span>
         <span class="k">except</span> <span class="ne">ImportError</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
-            <span class="n">logging</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Unable to import cv2, skip drawing: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="n">e</span><span class="p">))</span>
+            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'Unable to import cv2, skip drawing: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="n">e</span><span class="p">))</span>
             <span class="k">raise</span> <span class="ne">StopIteration</span>
         <span class="n">count</span> <span class="o">=</span> <span class="mi">0</span>
         <span class="k">try</span><span class="p">:</span>
diff --git a/versions/master/_modules/mxnet/image/image.html b/versions/master/_modules/mxnet/image/image.html
index 95b4bd2..cba12d0 100644
--- a/versions/master/_modules/mxnet/image/image.html
+++ b/versions/master/_modules/mxnet/image/image.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/initializer.html b/versions/master/_modules/mxnet/initializer.html
index e7f3aa2..4ce5d09 100644
--- a/versions/master/_modules/mxnet/initializer.html
+++ b/versions/master/_modules/mxnet/initializer.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/io.html b/versions/master/_modules/mxnet/io.html
index d8c4cbc..89ab753 100644
--- a/versions/master/_modules/mxnet/io.html
+++ b/versions/master/_modules/mxnet/io.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -721,17 +721,13 @@
     <span class="k">return</span> <span class="nb">list</span><span class="p">(</span><span class="n">data</span><span class="o">.</span><span class="n">items</span><span class="p">())</span>
 
 <span class="k">def</span> <span class="nf">_has_instance</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">dtype</span><span class="p">):</span>
-    <span class="sd">"""return True if data has instance of dtype"""</span>
-    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">dtype</span><span class="p">):</span>
-        <span class="k">return</span> <span class="bp">True</span>
-    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
-        <span class="k">for</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">data</span><span class="p">:</span>
-            <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="n">dtype</span><span class="p">):</span>
-                <span class="k">return</span> <span class="bp">True</span>
-    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="nb">dict</span><span class="p">):</span>
-        <span class="k">for</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">data</span><span class="o">.</span><span class="n">values</span><span class="p">():</span>
-            <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="n">dtype</span><span class="p">):</span>
-                <span class="k">return</span> <span class="bp">True</span>
+    <span class="sd">"""Return True if ``data`` has instance of ``dtype``.</span>
+<span class="sd">    This function is called after _init_data.</span>
+<span class="sd">    ``data`` is a list of (str, NDArray)"""</span>
+    <span class="k">for</span> <span class="n">item</span> <span class="ow">in</span> <span class="n">data</span><span class="p">:</span>
+        <span class="n">_</span><span class="p">,</span> <span class="n">arr</span> <span class="o">=</span> <span class="n">item</span>
+        <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">arr</span><span class="p">,</span> <span class="n">dtype</span><span class="p">):</span>
+            <span class="k">return</span> <span class="bp">True</span>
     <span class="k">return</span> <span class="bp">False</span>
 
 <span class="k">def</span> <span class="nf">_shuffle</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">idx</span><span class="p">):</span>
@@ -750,7 +746,7 @@
 
 <div class="viewcode-block" id="NDArrayIter"><a class="viewcode-back" href="../../api/python/io/io.html#mxnet.io.NDArrayIter">[docs]</a><span class="k">class</span> <span class="nc">NDArrayIter</span><span class="p">(</span><span class="n">DataIter</span><span class="p">):</span>
     <span class="sd">"""Returns an iterator for ``mx.nd.NDArray``, ``numpy.ndarray``, ``h5py.Dataset``</span>
-<span class="sd">    or ``mx.nd.sparse.CSRNDArray``.</span>
+<span class="sd">    ``mx.nd.sparse.CSRNDArray`` or ``scipy.sparse.csr_matrix``.</span>
 
 <span class="sd">    Example usage:</span>
 <span class="sd">    ----------</span>
@@ -850,12 +846,13 @@
                  <span class="n">label_name</span><span class="o">=</span><span class="s1">'softmax_label'</span><span class="p">):</span>
         <span class="nb">super</span><span class="p">(</span><span class="n">NDArrayIter</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">batch_size</span><span class="p">)</span>
 
-        <span class="k">if</span> <span class="p">((</span><span class="n">_has_instance</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">CSRNDArray</span><span class="p">)</span> <span class="ow">or</span> <span class="n">_has_instance</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">CSRNDArray</span><span class="p">))</span> <span class="ow">and</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">data</span> <span class="o">=</span> <span class="n">_init_data</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">allow_empty</span><span class="o">=</span><span class="bp">False</span><span class="p">,</span> <span class="n">default_name</span><span class="o">=</span><span class="n">data_name</span><span class="p">)</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">label</span> <span class="o">=</span> <span class="n">_init_data</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">allow_empty</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span> <span class="n">default_name</span><span class="o">=</span><span class="n">label_name</span><span class="p">)</span>
+
+        <span class="k">if</span> <span class="p">((</span><span class="n">_has_instance</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">data</span><span class="p">,</span> <span class="n">CSRNDArray</span><span class="p">)</span> <span class="ow">or</span> <span class="n">_has_instance</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">label</span><span class="p">,</span> <span class="n">CSR [...]
                 <span class="p">(</span><span class="n">last_batch_handle</span> <span class="o">!=</span> <span class="s1">'discard'</span><span class="p">)):</span>
             <span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">(</span><span class="s2">"`NDArrayIter` only supports ``CSRNDArray``"</span> \
                                       <span class="s2">" with `last_batch_handle` set to `discard`."</span><span class="p">)</span>
-        <span class="bp">self</span><span class="o">.</span><span class="n">data</span> <span class="o">=</span> <span class="n">_init_data</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">allow_empty</span><span class="o">=</span><span class="bp">False</span><span class="p">,</span> <span class="n">default_name</span><span class="o">=</span><span class="n">data_name</span><span class="p">)</span>
-        <span class="bp">self</span><span class="o">.</span><span class="n">label</span> <span class="o">=</span> <span class="n">_init_data</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">allow_empty</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span> <span class="n">default_name</span><span class="o">=</span><span class="n">label_name</span><span class="p">)</span>
 
         <span class="bp">self</span><span class="o">.</span><span class="n">idx</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">arange</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">data</span><span class="p">[</span><span class="mi">0</span><span class="p">][</span><span class="mi">1</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span> [...]
         <span class="c1"># shuffle data</span>
diff --git a/versions/master/_modules/mxnet/kvstore.html b/versions/master/_modules/mxnet/kvstore.html
index 5d85337..0124b8e 100644
--- a/versions/master/_modules/mxnet/kvstore.html
+++ b/versions/master/_modules/mxnet/kvstore.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/lr_scheduler.html b/versions/master/_modules/mxnet/lr_scheduler.html
index b12c9b2..85414b1 100644
--- a/versions/master/_modules/mxnet/lr_scheduler.html
+++ b/versions/master/_modules/mxnet/lr_scheduler.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/metric.html b/versions/master/_modules/mxnet/metric.html
index acec883..8d0a279 100644
--- a/versions/master/_modules/mxnet/metric.html
+++ b/versions/master/_modules/mxnet/metric.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/model.html b/versions/master/_modules/mxnet/model.html
index 491e3ae..4895e0f 100644
--- a/versions/master/_modules/mxnet/model.html
+++ b/versions/master/_modules/mxnet/model.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/module/base_module.html b/versions/master/_modules/mxnet/module/base_module.html
index eba7c6c..38951bf 100644
--- a/versions/master/_modules/mxnet/module/base_module.html
+++ b/versions/master/_modules/mxnet/module/base_module.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/module/bucketing_module.html b/versions/master/_modules/mxnet/module/bucketing_module.html
index dcc73fb..79c1eca 100644
--- a/versions/master/_modules/mxnet/module/bucketing_module.html
+++ b/versions/master/_modules/mxnet/module/bucketing_module.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/module/module.html b/versions/master/_modules/mxnet/module/module.html
index 36f2482..32daea5 100644
--- a/versions/master/_modules/mxnet/module/module.html
+++ b/versions/master/_modules/mxnet/module/module.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/module/python_module.html b/versions/master/_modules/mxnet/module/python_module.html
index f008b34..4dd1980 100644
--- a/versions/master/_modules/mxnet/module/python_module.html
+++ b/versions/master/_modules/mxnet/module/python_module.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/module/sequential_module.html b/versions/master/_modules/mxnet/module/sequential_module.html
index 11af22f..78147e5 100644
--- a/versions/master/_modules/mxnet/module/sequential_module.html
+++ b/versions/master/_modules/mxnet/module/sequential_module.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/ndarray/contrib.html b/versions/master/_modules/mxnet/ndarray/contrib.html
index f199a9d..91e4e20 100644
--- a/versions/master/_modules/mxnet/ndarray/contrib.html
+++ b/versions/master/_modules/mxnet/ndarray/contrib.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/ndarray/ndarray.html b/versions/master/_modules/mxnet/ndarray/ndarray.html
index 28c4329..954fb0b 100644
--- a/versions/master/_modules/mxnet/ndarray/ndarray.html
+++ b/versions/master/_modules/mxnet/ndarray/ndarray.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/ndarray/random.html b/versions/master/_modules/mxnet/ndarray/random.html
index 4ef7348..5c7f511 100644
--- a/versions/master/_modules/mxnet/ndarray/random.html
+++ b/versions/master/_modules/mxnet/ndarray/random.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/ndarray/sparse.html b/versions/master/_modules/mxnet/ndarray/sparse.html
index 33777ef..7f04675 100644
--- a/versions/master/_modules/mxnet/ndarray/sparse.html
+++ b/versions/master/_modules/mxnet/ndarray/sparse.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/ndarray/utils.html b/versions/master/_modules/mxnet/ndarray/utils.html
index bfab605..97a8973 100644
--- a/versions/master/_modules/mxnet/ndarray/utils.html
+++ b/versions/master/_modules/mxnet/ndarray/utils.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/optimizer.html b/versions/master/_modules/mxnet/optimizer.html
index cde75b2..f2566f5 100644
--- a/versions/master/_modules/mxnet/optimizer.html
+++ b/versions/master/_modules/mxnet/optimizer.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -226,13 +226,12 @@
 <span class="sd">"""Weight updating functions."""</span>
 <span class="kn">import</span> <span class="nn">math</span>
 <span class="kn">import</span> <span class="nn">pickle</span>
-<span class="kn">import</span> <span class="nn">logging</span>
 <span class="kn">import</span> <span class="nn">warnings</span>
 <span class="kn">import</span> <span class="nn">numpy</span>
 <span class="kn">from</span> <span class="nn">.base</span> <span class="kn">import</span> <span class="n">py_str</span>
 <span class="kn">from</span> <span class="nn">.ndarray</span> <span class="kn">import</span> <span class="p">(</span><span class="n">NDArray</span><span class="p">,</span> <span class="n">zeros</span><span class="p">,</span> <span class="n">clip</span><span class="p">,</span> <span class="n">sqrt</span><span class="p">,</span> <span class="n">cast</span><span class="p">,</span> <span class="n">maximum</span><span class="p">,</span> <span class="nb">abs</span> <span class="k">as</span> <s [...]
 <span class="kn">from</span> <span class="nn">.ndarray</span> <span class="kn">import</span> <span class="p">(</span><span class="n">sgd_update</span><span class="p">,</span> <span class="n">sgd_mom_update</span><span class="p">,</span> <span class="n">adam_update</span><span class="p">,</span> <span class="n">rmsprop_update</span><span class="p">,</span> <span class="n">rmspropalex_update</span><span class="p">,</span>
-                      <span class="n">mp_sgd_update</span><span class="p">,</span> <span class="n">mp_sgd_mom_update</span><span class="p">,</span> <span class="n">square</span><span class="p">,</span> <span class="n">ftrl_update</span><span class="p">)</span>
+                      <span class="n">mp_sgd_update</span><span class="p">,</span> <span class="n">mp_sgd_mom_update</span><span class="p">,</span> <span class="n">square</span><span class="p">,</span> <span class="n">ftrl_update</span><span class="p">,</span> <span class="n">ftml_update</span><span class="p">)</span>
 <span class="kn">from</span> <span class="nn">.ndarray</span> <span class="kn">import</span> <span class="n">_internal</span>
 <span class="kn">from</span> <span class="nn">.ndarray</span> <span class="kn">import</span> <span class="n">op</span>
 <span class="kn">from</span> <span class="nn">.ndarray</span> <span class="kn">import</span> <span class="n">sparse</span>
@@ -279,7 +278,7 @@
 
 <span class="sd">    Properties</span>
 <span class="sd">    ----------</span>
-<span class="sd">    learning_rate: float</span>
+<span class="sd">    learning_rate : float</span>
 <span class="sd">        The current learning rate of the optimizer. Given an Optimizer object</span>
 <span class="sd">        optimizer, its learning rate can be accessed as optimizer.learning_rate.</span>
 <span class="sd">    """</span>
@@ -335,11 +334,10 @@
         <span class="k">assert</span><span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">klass</span><span class="p">,</span> <span class="nb">type</span><span class="p">))</span>
         <span class="n">name</span> <span class="o">=</span> <span class="n">klass</span><span class="o">.</span><span class="vm">__name__</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span>
         <span class="k">if</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">Optimizer</span><span class="o">.</span><span class="n">opt_registry</span><span class="p">:</span>
-            <span class="n">logging</span><span class="o">.</span><span class="n">warning</span><span class="p">(</span><span class="s1">'WARNING: New optimizer </span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> is overriding '</span>
-                            <span class="s1">'existing optimizer </span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span>
-                            <span class="n">klass</span><span class="o">.</span><span class="vm">__module__</span><span class="p">,</span> <span class="n">klass</span><span class="o">.</span><span class="vm">__name__</span><span class="p">,</span>
-                            <span class="n">Optimizer</span><span class="o">.</span><span class="n">opt_registry</span><span class="p">[</span><span class="n">name</span><span class="p">]</span><span class="o">.</span><span class="vm">__module__</span><span class="p">,</span>
-                            <span class="n">Optimizer</span><span class="o">.</span><span class="n">opt_registry</span><span class="p">[</span><span class="n">name</span><span class="p">]</span><span class="o">.</span><span class="vm">__name__</span><span class="p">)</span>
+            <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">'WARNING: New optimizer </span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> is overriding existing '</span>
+                          <span class="s1">'optimizer </span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">klass</span><span class="o">.</span><span class="vm">__module__</span><span class="p">,</span> <span class="n">klass</span><span class="o">.</span><span class="vm">__name__</span><span class="p">,</span>
+                          <span class="n">Optimizer</span><span class="o">.</span><span class="n">opt_registry</span><span class="p">[</span><span class="n">name</span><span class="p">]</span><span class="o">.</span><span class="vm">__module__</span><span class="p">,</span>
+                          <span class="n">Optimizer</span><span class="o">.</span><span class="n">opt_registry</span><span class="p">[</span><span class="n">name</span><span class="p">]</span><span class="o">.</span><span class="vm">__name__</span><span class="p">)</span>
         <span class="n">Optimizer</span><span class="o">.</span><span class="n">opt_registry</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">klass</span>
         <span class="k">return</span> <span class="n">klass</span></div>
 
@@ -737,6 +735,55 @@
         <span class="bp">self</span><span class="o">.</span><span class="n">_update_impl</span><span class="p">(</span><span class="n">index</span><span class="p">,</span> <span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="p">,</span> <span class="n">state</span><span class="p">,</span>
                           <span class="n">multi_precision</span><span class="o">=</span><span class="n">use_multi_precision</span><span class="p">)</span></div>
 
+
+<span class="nd">@register</span>
+<div class="viewcode-block" id="FTML"><a class="viewcode-back" href="../../api/python/optimization/optimization.html#mxnet.optimizer.FTML">[docs]</a><span class="k">class</span> <span class="nc">FTML</span><span class="p">(</span><span class="n">Optimizer</span><span class="p">):</span>
+    <span class="sd">"""The FTML optimizer.</span>
+
+<span class="sd">    This class implements the optimizer described in</span>
+<span class="sd">    *FTML - Follow the Moving Leader in Deep Learning*,</span>
+<span class="sd">    available at http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf.</span>
+
+<span class="sd">    This optimizer accepts the following parameters in addition to those accepted</span>
+<span class="sd">    by :class:`.Optimizer`.</span>
+
+<span class="sd">    Parameters</span>
+<span class="sd">    ----------</span>
+<span class="sd">    beta1 : float, optional</span>
+<span class="sd">        0 < beta1 < 1. Generally close to 0.5.</span>
+<span class="sd">    beta2 : float, optional</span>
+<span class="sd">        0 < beta2 < 1. Generally close to 1.</span>
+<span class="sd">    epsilon : float, optional</span>
+<span class="sd">        Small value to avoid division by 0.</span>
+<span class="sd">    """</span>
+    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">beta1</span><span class="o">=</span><span class="mf">0.6</span><span class="p">,</span> <span class="n">beta2</span><span class="o">=</span><span class="mf">0.999</span><span class="p">,</span> <span class="n">epsilon</span><span class="o">=</span><span class="mf">1e-8</span><span class="p">,</span> <span class="o">**</span><span clas [...]
+        <span class="nb">super</span><span class="p">(</span><span class="n">FTML</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">beta1</span> <span class="o">=</span> <span class="n">beta1</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">beta2</span> <span class="o">=</span> <span class="n">beta2</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">epsilon</span> <span class="o">=</span> <span class="n">epsilon</span>
+
+    <span class="k">def</span> <span class="nf">create_state</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">index</span><span class="p">,</span> <span class="n">weight</span><span class="p">):</span>
+        <span class="k">return</span> <span class="p">(</span><span class="n">zeros</span><span class="p">(</span><span class="n">weight</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">weight</span><span class="o">.</span><span class="n">context</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">weight</span><span class="o">.</span><span class="n">dtype</span><span class="p">),</span> <span [...]
+                <span class="n">zeros</span><span class="p">(</span><span class="n">weight</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">weight</span><span class="o">.</span><span class="n">context</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">weight</span><span class="o">.</span><span class="n">dtype</span><span class="p">),</span> <span class="c1"># v_0</span>
+                <span class="n">zeros</span><span class="p">(</span><span class="n">weight</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">weight</span><span class="o">.</span><span class="n">context</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">weight</span><span class="o">.</span><span class="n">dtype</span><span class="p">))</span> <span class="c1"># z_0</span>
+
+    <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">index</span><span class="p">,</span> <span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="p">,</span> <span class="n">state</span><span class="p">):</span>
+        <span class="k">assert</span><span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">weight</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">))</span>
+        <span class="k">assert</span><span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">grad</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">))</span>
+        <span class="bp">self</span><span class="o">.</span><span class="n">_update_count</span><span class="p">(</span><span class="n">index</span><span class="p">)</span>
+        <span class="n">lr</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_lr</span><span class="p">(</span><span class="n">index</span><span class="p">)</span>
+        <span class="n">wd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_wd</span><span class="p">(</span><span class="n">index</span><span class="p">)</span>
+        <span class="n">t</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_index_update_count</span><span class="p">[</span><span class="n">index</span><span class="p">]</span>
+
+        <span class="n">kwargs</span> <span class="o">=</span> <span class="p">{</span><span class="s1">'beta1'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">beta1</span><span class="p">,</span> <span class="s1">'beta2'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">beta2</span><span class="p">,</span> <span class="s1">'epsilon'</span><span class="p">:</span> <span class="bp">self</sp [...]
+                  <span class="s1">'rescale_grad'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">rescale_grad</span><span class="p">,</span> <span class="s1">'t'</span><span class="p">:</span> <span class="n">t</span><span class="p">}</span>
+        <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span><span class="p">:</span>
+            <span class="n">kwargs</span><span class="p">[</span><span class="s1">'clip_grad'</span><span class="p">]</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span>
+
+        <span class="n">prev_d</span><span class="p">,</span> <span class="n">prev_v</span><span class="p">,</span> <span class="n">prev_z</span> <span class="o">=</span> <span class="n">state</span>
+        <span class="n">ftml_update</span><span class="p">(</span><span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="p">,</span> <span class="n">prev_d</span><span class="p">,</span> <span class="n">prev_v</span><span class="p">,</span> <span class="n">prev_z</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">weight</span><span class="p">,</span>
+                    <span class="n">lr</span><span class="o">=</span><span class="n">lr</span><span class="p">,</span> <span class="n">wd</span><span class="o">=</span><span class="n">wd</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
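+
+A minimal usage sketch (assuming the `@register` decorator above registers the class under the lowercase name `ftml`, so it can be selected through `mxnet.optimizer.create`):
+
+```python
+import mxnet as mx
+
+# Create the FTML optimizer by its registered name; beta1, beta2 and epsilon
+# are the hyperparameters documented in the docstring above.
+opt = mx.optimizer.create('ftml', learning_rate=0.0025,
+                          beta1=0.6, beta2=0.999, epsilon=1e-8)
+
+# The optimizer instance can then be passed to a Trainer or Module, e.g.:
+# trainer = mx.gluon.Trainer(net.collect_params(), opt)
+```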
+
 <span class="c1"># pylint: enable=line-too-long</span>
 <span class="nd">@register</span>
 <div class="viewcode-block" id="DCASGD"><a class="viewcode-back" href="../../api/python/optimization/optimization.html#mxnet.optimizer.DCASGD">[docs]</a><span class="k">class</span> <span class="nc">DCASGD</span><span class="p">(</span><span class="n">Optimizer</span><span class="p">):</span>
@@ -856,7 +903,8 @@
         <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span>
             <span class="n">grad</span> <span class="o">=</span> <span class="n">clip</span><span class="p">(</span><span class="n">grad</span><span class="p">,</span> <span class="o">-</span><span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span><span class="p">)</span>
         <span class="n">weight</span><span class="p">[:]</span> <span class="o">+=</span> <span class="o">-</span> <span class="n">lr</span><span class="o">/</span><span class="mi">2</span> <span class="o">*</span> <span class="p">(</span><span class="n">grad</span> <span class="o">+</span> <span class="n">wd</span> <span class="o">*</span> <span class="n">weight</span><span class="p">)</span> <span class="o">+</span> <span class="n">normal</span><span class="p">(</span><span class="mi"> [...]
-                                                            <span class="n">weight</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">weight</span><span class="o">.</span><span class="n">context</span><span class="p">)</span></div>
+                                                            <span class="n">shape</span><span class="o">=</span><span class="n">weight</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span>
+                                                            <span class="n">ctx</span><span class="o">=</span><span class="n">weight</span><span class="o">.</span><span class="n">context</span><span class="p">)</span></div>
 
 
 <span class="nd">@register</span>  <span class="c1"># pylint: disable=invalid-name</span>
@@ -1306,7 +1354,7 @@
         <span class="n">t</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_index_update_count</span><span class="p">[</span><span class="n">index</span><span class="p">]</span>
 
         <span class="c1"># preprocess grad</span>
-        <span class="n">grad</span> <span class="o">*=</span> <span class="bp">self</span><span class="o">.</span><span class="n">rescale_grad</span> <span class="o">+</span> <span class="n">wd</span> <span class="o">*</span> <span class="n">weight</span>
+        <span class="n">grad</span> <span class="o">=</span> <span class="n">grad</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">rescale_grad</span> <span class="o">+</span> <span class="n">wd</span> <span class="o">*</span> <span class="n">weight</span>
         <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span>
             <span class="n">grad</span> <span class="o">=</span> <span class="n">clip</span><span class="p">(</span><span class="n">grad</span><span class="p">,</span> <span class="o">-</span><span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">clip_gradient</span><span class="p">)</span>
 
diff --git a/versions/master/_modules/mxnet/random.html b/versions/master/_modules/mxnet/random.html
index 3168ac1..8f27606 100644
--- a/versions/master/_modules/mxnet/random.html
+++ b/versions/master/_modules/mxnet/random.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/recordio.html b/versions/master/_modules/mxnet/recordio.html
index 693640e..547e46d 100644
--- a/versions/master/_modules/mxnet/recordio.html
+++ b/versions/master/_modules/mxnet/recordio.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/rnn/io.html b/versions/master/_modules/mxnet/rnn/io.html
index 7194109..42d16f2 100644
--- a/versions/master/_modules/mxnet/rnn/io.html
+++ b/versions/master/_modules/mxnet/rnn/io.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/rnn/rnn.html b/versions/master/_modules/mxnet/rnn/rnn.html
index 40d08b7..648fdc1 100644
--- a/versions/master/_modules/mxnet/rnn/rnn.html
+++ b/versions/master/_modules/mxnet/rnn/rnn.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/rnn/rnn_cell.html b/versions/master/_modules/mxnet/rnn/rnn_cell.html
index 146bf06..4c005bd 100644
--- a/versions/master/_modules/mxnet/rnn/rnn_cell.html
+++ b/versions/master/_modules/mxnet/rnn/rnn_cell.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/rtc.html b/versions/master/_modules/mxnet/rtc.html
index 0df8a85..3d58c5b 100644
--- a/versions/master/_modules/mxnet/rtc.html
+++ b/versions/master/_modules/mxnet/rtc.html
@@ -51,7 +51,7 @@
 <link href="../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/symbol/contrib.html b/versions/master/_modules/mxnet/symbol/contrib.html
index 72e666b..5fde3b7 100644
--- a/versions/master/_modules/mxnet/symbol/contrib.html
+++ b/versions/master/_modules/mxnet/symbol/contrib.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/symbol/random.html b/versions/master/_modules/mxnet/symbol/random.html
index d2531ce..654a418 100644
--- a/versions/master/_modules/mxnet/symbol/random.html
+++ b/versions/master/_modules/mxnet/symbol/random.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_modules/mxnet/symbol/symbol.html b/versions/master/_modules/mxnet/symbol/symbol.html
index 1d90e53..a00b417 100644
--- a/versions/master/_modules/mxnet/symbol/symbol.html
+++ b/versions/master/_modules/mxnet/symbol/symbol.html
@@ -51,7 +51,7 @@
 <link href="../../index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -262,6 +262,9 @@
     <span class="c1"># pylint: disable=no-member</span>
     <span class="vm">__slots__</span> <span class="o">=</span> <span class="p">[]</span>
 
+    <span class="c1"># Make numpy functions return Symbol instead of numpy object array</span>
+    <span class="n">__array_priority__</span> <span class="o">=</span> <span class="mf">1000.0</span>
+
 <div class="viewcode-block" id="Symbol.__repr__"><a class="viewcode-back" href="../../../api/python/symbol/symbol.html#mxnet.symbol.Symbol.__repr__">[docs]</a>    <span class="k">def</span> <span class="fm">__repr__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
         <span class="sd">"""Gets a string representation of the symbol."""</span>
         <span class="n">name</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">name</span>
@@ -697,14 +700,16 @@
 <span class="sd">            Indexing key</span>
 
 <span class="sd">        """</span>
-        <span class="n">output_names</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">list_outputs</span><span class="p">()</span>
+        <span class="n">output_count</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
         <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">index</span><span class="p">,</span> <span class="n">py_slice</span><span class="p">):</span>
             <span class="n">start</span> <span class="o">=</span> <span class="mi">0</span> <span class="k">if</span> <span class="n">index</span><span class="o">.</span><span class="n">start</span> <span class="ow">is</span> <span class="bp">None</span> <span class="k">else</span> <span class="n">index</span><span class="o">.</span><span class="n">start</span>
-            <span class="n">stop</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">output_names</span><span class="p">)</span> <span class="k">if</span> <span class="n">index</span><span class="o">.</span><span class="n">stop</span> <span class="ow">is</span> <span class="bp">None</span> <span class="k">else</span> <span class="n">index</span><span class="o">.</span><span class="n">stop</span>
+            <span class="n">stop</span> <span class="o">=</span> <span class="n">output_count</span> <span class="k">if</span> <span class="n">index</span><span class="o">.</span><span class="n">stop</span> <span class="ow">is</span> <span class="bp">None</span> <span class="k">else</span> <span class="n">index</span><span class="o">.</span><span class="n">stop</span>
             <span class="n">step</span> <span class="o">=</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">index</span><span class="o">.</span><span class="n">step</span> <span class="ow">is</span> <span class="bp">None</span> <span class="k">else</span> <span class="n">index</span><span class="o">.</span><span class="n">step</span>
             <span class="k">return</span> <span class="n">Group</span><span class="p">([</span><span class="bp">self</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">step</span><span class="p">)])</span>
 
         <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">index</span><span class="p">,</span> <span class="n">string_types</span><span class="p">):</span>
+            <span class="c1"># Returning this list of names is expensive. Some symbols may have hundreds of outputs</span>
+            <span class="n">output_names</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">list_outputs</span><span class="p">()</span>
             <span class="n">idx</span> <span class="o">=</span> <span class="bp">None</span>
             <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">name</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">output_names</span><span class="p">):</span>
                 <span class="k">if</span> <span class="n">name</span> <span class="o">==</span> <span class="n">index</span><span class="p">:</span>
@@ -717,7 +722,7 @@
 
         <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">index</span><span class="p">,</span> <span class="nb">int</span><span class="p">):</span>
             <span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s1">'Symbol only support integer index to fetch i-th output'</span><span class="p">)</span>
-        <span class="k">if</span> <span class="n">index</span> <span class="o">>=</span> <span class="nb">len</span><span class="p">(</span><span class="n">output_names</span><span class="p">):</span>
+        <span class="k">if</span> <span class="n">index</span> <span class="o">>=</span> <span class="n">output_count</span><span class="p">:</span>
             <span class="c1"># Important, python determines the end by this exception</span>
             <span class="k">raise</span> <span class="ne">IndexError</span>
         <span class="n">handle</span> <span class="o">=</span> <span class="n">SymbolHandle</span><span class="p">()</span>
@@ -951,6 +956,25 @@
             <span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">size</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">sarr</span><span class="p">)))</span>
         <span class="k">return</span> <span class="p">[</span><span class="n">py_str</span><span class="p">(</span><span class="n">sarr</span><span class="p">[</span><span class="n">i</span><span class="p">])</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">size</span><span class="o">.</span><span class="n">value</span><span class="p">)]</span></div>
 
+<div class="viewcode-block" id="Symbol.__len__"><a class="viewcode-back" href="../../../api/python/symbol/symbol.html#mxnet.symbol.Symbol.__len__">[docs]</a>    <span class="k">def</span> <span class="fm">__len__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="sd">"""Get number of outputs for the symbol.</span>
+
+<span class="sd">        Example</span>
+<span class="sd">        -------</span>
+<span class="sd">        >>> a = mx.sym.var('a')</span>
+<span class="sd">        >>> b = mx.sym.var('b')</span>
+<span class="sd">        >>> c = a + b</span>
+<span class="sd">        >>> len(c)</span>
+
+<span class="sd">        Returns</span>
+<span class="sd">        -------</span>
+<span class="sd">        len(self): Number of outputs</span>
+<span class="sd">            Number of outputs</span>
+<span class="sd">        """</span>
+        <span class="n">output_count</span> <span class="o">=</span> <span class="n">mx_uint</span><span class="p">()</span>
+        <span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXSymbolGetNumOutputs</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">output_count</span><span class="p">)))</span>
+        <span class="k">return</span> <span class="n">output_count</span><span class="o">.</span><span class="n">value</span></div>
+
 <div class="viewcode-block" id="Symbol.list_auxiliary_states"><a class="viewcode-back" href="../../../api/python/symbol/symbol.html#mxnet.symbol.Symbol.list_auxiliary_states">[docs]</a>    <span class="k">def</span> <span class="nf">list_auxiliary_states</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
         <span class="sd">"""Lists all the auxiliary states in the symbol.</span>
 
diff --git a/versions/master/_modules/symbol.html b/versions/master/_modules/symbol.html
index 90bcb89..c8a0018 100644
--- a/versions/master/_modules/symbol.html
+++ b/versions/master/_modules/symbol.html
@@ -51,7 +51,7 @@
 <link href="index.html" rel="up" title="Module code">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/_sources/api/python/gluon/model_zoo.txt b/versions/master/_sources/api/python/gluon/model_zoo.txt
index 8310461..27d2647 100644
--- a/versions/master/_sources/api/python/gluon/model_zoo.txt
+++ b/versions/master/_sources/api/python/gluon/model_zoo.txt
@@ -28,6 +28,41 @@ In the rest of this document, we list routines provided by the `gluon.model_zoo`
 .. automodule:: mxnet.gluon.model_zoo.vision
 ```
 
+The following table summarizes the available models.
+
+| Alias         | Network                                                                               | # Parameters | Top-1 Accuracy | Top-5 Accuracy | Origin                                                                                                                                               |
+|---------------|---------------------------------------------------------------------------------------|--------------|----------------|----------------|------------------------------------------------------------------------------------------------------------------------------------------------------|
+| alexnet       | [AlexNet](https://arxiv.org/abs/1404.5997)                                            | 61,100,840   | 0.5492         | 0.7803         | Converted from pytorch vision                                                                                                                        |
+| densenet121   | [DenseNet-121](https://arxiv.org/pdf/1608.06993.pdf)                                  | 8,062,504    | 0.7497         | 0.9225         | Converted from pytorch vision                                                                                                                        |
+| densenet161   | [DenseNet-161](https://arxiv.org/pdf/1608.06993.pdf)                                  | 28,900,936   | 0.7770         | 0.9380         | Converted from pytorch vision                                                                                                                        |
+| densenet169   | [DenseNet-169](https://arxiv.org/pdf/1608.06993.pdf)                                  | 14,307,880   | 0.7617         | 0.9317         | Converted from pytorch vision                                                                                                                        |
+| densenet201   | [DenseNet-201](https://arxiv.org/pdf/1608.06993.pdf)                                  | 20,242,984   | 0.7732         | 0.9362         | Converted from pytorch vision                                                                                                                        |
+| inceptionv3   | [Inception V3 299x299](http://arxiv.org/abs/1512.00567)                               | 23,869,000   | 0.7755         | 0.9364         | Converted from pytorch vision                                                                                                                        |
+| mobilenet0.25 | [MobileNet 0.25](https://arxiv.org/abs/1704.04861)                                    | 475,544      | 0.5185         | 0.7608         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| mobilenet0.5  | [MobileNet 0.5](https://arxiv.org/abs/1704.04861)                                     | 1,342,536    | 0.6307         | 0.8475         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| mobilenet0.75 | [MobileNet 0.75](https://arxiv.org/abs/1704.04861)                                    | 2,601,976    | 0.6738         | 0.8782         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| mobilenet1.0  | [MobileNet 1.0](https://arxiv.org/abs/1704.04861)                                     | 4,253,864    | 0.7105         | 0.9006         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| resnet18_v1   | [ResNet-18 V1](http://arxiv.org/abs/1512.03385)                                       | 11,699,112   | 0.6803         | 0.8818         | Converted from pytorch vision                                                                                                                        |
+| resnet34_v1   | [ResNet-34 V1](http://arxiv.org/abs/1512.03385)                                       | 21,814,696   | 0.7202         | 0.9066         | Converted from pytorch vision                                                                                                                        |
+| resnet50_v1   | [ResNet-50 V1](http://arxiv.org/abs/1512.03385)                                       | 25,629,032   | 0.7540         | 0.9266         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| resnet101_v1  | [ResNet-101 V1](http://arxiv.org/abs/1512.03385)                                      | 44,695,144   | 0.7693         | 0.9334         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| resnet152_v1  | [ResNet-152 V1](http://arxiv.org/abs/1512.03385)                                      | 60,404,072   | 0.7727         | 0.9353         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| resnet18_v2   | [ResNet-18 V2](https://arxiv.org/abs/1603.05027)                                      | 11,695,796   | 0.6961         | 0.8901         | Trained with [script](https://github.com/apache/incubator-mxnet/blob/4dcd96ae2f6820e01455079d00f49db1cd21eda9/example/gluon/image_classification.py) |
+| resnet34_v2   | [ResNet-34 V2](https://arxiv.org/abs/1603.05027)                                      | 21,811,380   | 0.7324         | 0.9125         | Trained with [script](https://github.com/apache/incubator-mxnet/blob/4dcd96ae2f6820e01455079d00f49db1cd21eda9/example/gluon/image_classification.py) |
+| resnet50_v2   | [ResNet-50 V2](https://arxiv.org/abs/1603.05027)                                      | 25,595,060   | 0.7622         | 0.9297         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| resnet101_v2  | [ResNet-101 V2](https://arxiv.org/abs/1603.05027)                                     | 44,639,412   | 0.7747         | 0.9375         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| resnet152_v2  | [ResNet-152 V2](https://arxiv.org/abs/1603.05027)                                     | 60,329,140   | 0.7833         | 0.9409         | Trained with [script](https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py)              |
+| squeezenet1.0 | [SqueezeNet 1.0](https://arxiv.org/abs/1602.07360)                                    | 1,248,424    | 0.5611         | 0.7909         | Converted from pytorch vision                                                                                                                        |
+| squeezenet1.1 | [SqueezeNet 1.1](https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1) | 1,235,496    | 0.5496         | 0.7817         | Converted from pytorch vision                                                                                                                        |
+| vgg11         | [VGG-11](https://arxiv.org/abs/1409.1556)                                             | 132,863,336  | 0.6662         | 0.8734         | Converted from pytorch vision                                                                                                                        |
+| vgg13         | [VGG-13](https://arxiv.org/abs/1409.1556)                                             | 133,047,848  | 0.6774         | 0.8811         | Converted from pytorch vision                                                                                                                        |
+| vgg16         | [VGG-16](https://arxiv.org/abs/1409.1556)                                             | 138,357,544  | 0.6986         | 0.8945         | Converted from pytorch vision                                                                                                                        |
+| vgg19         | [VGG-19](https://arxiv.org/abs/1409.1556)                                             | 143,667,240  | 0.7072         | 0.8988         | Converted from pytorch vision                                                                                                                        |
+| vgg11_bn      | [VGG-11 with batch normalization](https://arxiv.org/abs/1409.1556)                    | 132,874,344  | 0.6859         | 0.8872         | Converted from pytorch vision                                                                                                                        |
+| vgg13_bn      | [VGG-13 with batch normalization](https://arxiv.org/abs/1409.1556)                    | 133,059,624  | 0.6884         | 0.8882         | Converted from pytorch vision                                                                                                                        |
+| vgg16_bn      | [VGG-16 with batch normalization](https://arxiv.org/abs/1409.1556)                    | 138,374,440  | 0.7142         | 0.9043         | Converted from pytorch vision                                                                                                                        |
+| vgg19_bn      | [VGG-19 with batch normalization](https://arxiv.org/abs/1409.1556)                    | 143,689,256  | 0.7241         | 0.9093         | Converted from pytorch vision                                                                                                                        |
+
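+As an illustrative sketch (not an exhaustive reference), any model in the table can be constructed from the alias in the first column via `get_model`; `pretrained=True` downloads the corresponding weights:
+
+```python
+from mxnet.gluon.model_zoo import vision
+
+# Load a pretrained ResNet-18 V1 by its alias from the table above
+net = vision.get_model('resnet18_v1', pretrained=True)
+
+# Without pretrained weights, the same call returns an uninitialized network,
+# e.g. with a custom number of output classes for training from scratch.
+scratch_net = vision.get_model('resnet18_v1', classes=10)
+```
+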
 ```eval_rst
 .. autosummary::
     :nosignatures:
diff --git a/versions/master/_sources/api/python/ndarray/sparse.txt b/versions/master/_sources/api/python/ndarray/sparse.txt
index dd0286d..3e6021e 100644
--- a/versions/master/_sources/api/python/ndarray/sparse.txt
+++ b/versions/master/_sources/api/python/ndarray/sparse.txt
@@ -61,9 +61,10 @@ A detailed tutorial is available at
 [RowSparseNDArray - NDArray for Sparse Gradient Updates](https://mxnet.incubator.apache.org/versions/master/tutorials/sparse/row_sparse.html).
 <br><br>
 
-
 ```eval_rst
 
+.. note:: ``mxnet.ndarray.sparse.RowSparseNDArray`` and ``mxnet.ndarray.sparse.CSRNDArray`` DO NOT support the ``mxnet.gluon`` high-level interface yet.
+
 .. note:: ``mxnet.ndarray.sparse`` is similar to ``mxnet.ndarray`` in some aspects. But the differences are not negligible. For instance:
 
    - Only a subset of operators in ``mxnet.ndarray`` have specialized implementations in ``mxnet.ndarray.sparse``.
diff --git a/versions/master/_sources/faq/index.txt b/versions/master/_sources/faq/index.txt
index 68c7d41..e5807f4 100644
--- a/versions/master/_sources/faq/index.txt
+++ b/versions/master/_sources/faq/index.txt
@@ -58,8 +58,6 @@ and full working examples, visit the [tutorials section](../tutorials/index.md).
 
 * [How do I set MXNet's environmental variables?](http://mxnet.io/how_to/env_var.html)
 
-* [How do I use MXNet as a front end for Torch?](http://mxnet.io/how_to/torch.html)
-
 ## Questions about Using MXNet
 If you need help with using MXNet, have questions about applying it to a particular kind of problem, or have a discussion topic, please use our [forum](https://discuss.mxnet.io).
 
diff --git a/versions/master/_sources/install/build_from_source.txt b/versions/master/_sources/install/build_from_source.txt
index 82baa1b..4f7083a 100644
--- a/versions/master/_sources/install/build_from_source.txt
+++ b/versions/master/_sources/install/build_from_source.txt
@@ -319,6 +319,43 @@ These commands produce a library called ```mxnet.dll``` in the ```./build/Releas
 
 </div>
 
+<div class="linux ubuntu">
+
+## Build MXNet using NCCL
+- Download and install the latest NCCL library from NVIDIA.
+- Note the directory path in which NCCL libraries and header files are installed.
+- Ensure that the installation directory contains ```lib``` and ```include``` folders.
+- Ensure that the prerequisites for using NCCL, such as the CUDA libraries, are met.
+- Append the following to the ```config.mk``` file, in addition to the CUDA-related options:
+- USE_NCCL=1
+- USE_NCCL_PATH=path-to-nccl-installation-folder
+``` bash
+echo "USE_NCCL=1" >> make/config.mk
+echo "USE_NCCL_PATH=path-to-nccl-installation-folder" >> make/config.mk
+cp make/config.mk .
+```
+- Run the make command:
+``` bash
+make -j"$(nproc)"
+```
+
+## Validation
+- Follow the steps to install the MXNet Python binding.
+- Comment out the following line in the ```test_nccl.py``` file at ```incubator-mxnet/tests/python/gpu/test_nccl.py```:
+``` python
+@unittest.skip("Test requires NCCL library installed and enabled during build")
+```
+- Run the test_nccl.py script as follows. The test should complete; it does not produce any output.
+``` bash
+nosetests --verbose tests/python/gpu/test_nccl.py
+```
+
+## Recommendation for best performance
+It is recommended to set the environment variable ```NCCL_LAUNCH_MODE``` to ```PARALLEL``` when using NCCL version 2.1 or newer, as sketched below.
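+
+A minimal sketch of one way to do this from Python, assuming the variable is set before MXNet initializes NCCL (exporting it from the shell works just as well):
+
+``` python
+import os
+
+# NCCL reads NCCL_LAUNCH_MODE from the environment, so set it before
+# MXNet creates the NCCL kvstore / communicator.
+os.environ["NCCL_LAUNCH_MODE"] = "PARALLEL"
+
+import mxnet as mx
+```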
+
+
+</div>
+
 ## Build the C++ package
 The C++ package has the same prerequisites as the MXNet library, you should also have `python` installed. (Both `python` 2 and 3 are supported)
 
diff --git a/versions/master/_sources/install/index.txt b/versions/master/_sources/install/index.txt
index 24d6aee..7cdc44c 100644
--- a/versions/master/_sources/install/index.txt
+++ b/versions/master/_sources/install/index.txt
@@ -229,7 +229,7 @@ $ sudo apt-get install -y libopencv-dev
 **Step 4** Download MXNet sources and build MXNet core shared library.
 
 ```bash
-$ git clone --recursive https://github.com/apache/incubator-mxnet 
+$ git clone --recursive https://github.com/apache/incubator-mxnet
 $ cd incubator-mxnet
 $ make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas
 ```
@@ -240,10 +240,10 @@ $ make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas
 
 **Build the MXNet Python binding**
 
-**Step 1** Install prerequisites - python, setup-tools, python-pip and numpy.
+**Step 1** Install prerequisites - python, setuptools, python-pip and libgfortran (required for NumPy).
 
 ```bash
-$ sudo apt-get install -y python-dev python-setuptools python-numpy python-pip
+$ sudo apt-get install -y python-dev python-setuptools python-pip libgfortran3
 ```
 
 **Step 2** Install the MXNet Python binding.
@@ -284,8 +284,8 @@ The following installation instructions have been tested on Ubuntu 14.04 and 16.
 
 Install the following NVIDIA libraries to setup *MXNet* with GPU support:
 
-1. Install CUDA 8.0 following the NVIDIA's [installation guide](http://docs.nvidia.com/cuda/cuda-installation-guide-linux/).
-2. Install cuDNN 5 for CUDA 8.0 following the NVIDIA's [installation guide](https://developer.nvidia.com/cudnn). You may need to register with NVIDIA for downloading the cuDNN library.
+1. Install CUDA 9.0 following NVIDIA's [installation guide](http://docs.nvidia.com/cuda/cuda-installation-guide-linux/).
+2. Install cuDNN 7 for CUDA 9.0 following NVIDIA's [installation guide](https://developer.nvidia.com/cudnn). You may need to register with NVIDIA to download the cuDNN library.
 
 **Note:** Make sure to add CUDA install path to `LD_LIBRARY_PATH`.
 
@@ -304,10 +304,10 @@ $ sudo apt-get install -y wget python
 $ wget https://bootstrap.pypa.io/get-pip.py && sudo python get-pip.py
 ```
 
-**Step 2**  Install *MXNet* with GPU support using CUDA 8.0
+**Step 2**  Install *MXNet* with GPU support using CUDA 9.0
 
 ```bash
-$ pip install mxnet-cu80
+$ pip install mxnet-cu90
 ```
 
 **Step 3**  Install [Graphviz](http://www.graphviz.org/). (Optional, needed for graph visualization using `mxnet.viz` package).
@@ -320,7 +320,7 @@ pip install graphviz
 
 **Experimental Choice** If You would like to install mxnet with Intel MKL, try the experimental pip package with MKL:
 ```bash
-$ pip install mxnet-cu80mkl
+$ pip install mxnet-cu90mkl
 ```
 
 </div>
@@ -364,10 +364,10 @@ Installing *MXNet* with pip requires a latest version of `pip`. Install the late
 (mxnet)$ pip install --upgrade pip
 ```
 
-Install *MXNet* with GPU support using CUDA 8.0.
+Install *MXNet* with GPU support using CUDA 9.0.
 
 ```bash
-(mxnet)$ pip install mxnet-cu80
+(mxnet)$ pip install mxnet-cu90
 ```
 
 **Step 4**  Install [Graphviz](http://www.graphviz.org/). (Optional, needed for graph visualization using `mxnet.viz` package).
@@ -468,10 +468,10 @@ $ make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/
 
 **Install the MXNet Python binding**
 
-**Step 1** Install prerequisites - python, setup-tools, python-pip and numpy.
+**Step 1** Install prerequisites - python, setuptools, python-pip and libgfortran (required for NumPy).
 
 ```bash
-$ sudo apt-get install -y python-dev python-setuptools python-numpy python-pip
+$ sudo apt-get install -y python-dev python-setuptools python-pip libgfortran3
 ```
 
 **Step 2** Install the MXNet Python binding.
@@ -692,7 +692,7 @@ $ bash install-mxnet-osx-python.sh
 More details and verified installation instructions for macOS, with GPUs, coming soon.
 
 
-*MXNet* is expected to be compatible on macOS with NVIDIA GPUs. Please install CUDA 8.0 and cuDNN 5.0, prior to installing GPU version of *MXNet*.
+*MXNet* is expected to be compatible with NVIDIA GPUs on macOS. Please install CUDA 9.0 and cuDNN 7 prior to installing the GPU version of *MXNet*.
 
 </div>
 </div>
@@ -704,11 +704,9 @@ More details and verified installation instructions for macOS, with GPUs, coming
 
 <div class="cloud">
 
-AWS Marketplace distributes AMIs (Amazon Machine Image) with MXNet pre-installed. You can launch an Amazon EC2 instance with one of the below AMIs:
-1. Deep Learning AMI (Amazon Machine Image) for [Ubuntu](https://aws.amazon.com/marketplace/pp/B06VSPXKDX)
-2. Deep Learning AMI for [Amazon Linux](https://aws.amazon.com/marketplace/pp/B01M0AXXQB)
+AWS Marketplace distributes Deep Learning AMIs (Amazon Machine Images) with MXNet pre-installed. You can launch one of these Deep Learning AMIs by following the instructions in the [AWS Deep Learning AMI Developer Guide](http://docs.aws.amazon.com/dlami/latest/devguide/what-is-dlami.html).
 
-You could also run distributed deeplearning with *MXNet* on AWS using [Cloudformation Template](https://github.com/awslabs/deeplearning-cfn/blob/master/README.md).
+You can also run distributed deep learning with *MXNet* on AWS using the [CloudFormation template](https://github.com/awslabs/deeplearning-cfn/blob/master/README.md).
 
 </div>
 
@@ -814,8 +812,8 @@ The following installation instructions have been tested on Ubuntu 14.04 and 16.
 
 Install the following NVIDIA libraries to setup *MXNet* with GPU support:
 
-1. Install CUDA 8.0 following the NVIDIA's [installation guide](http://docs.nvidia.com/cuda/cuda-installation-guide-linux/).
-2. Install cuDNN 5 for CUDA 8.0 following the NVIDIA's [installation guide](https://developer.nvidia.com/cudnn). You may need to register with NVIDIA for downloading the cuDNN library.
+1. Install CUDA 9.0 following NVIDIA's [installation guide](http://docs.nvidia.com/cuda/cuda-installation-guide-linux/).
+2. Install cuDNN 7 for CUDA 9.0 following NVIDIA's [installation guide](https://developer.nvidia.com/cudnn). You may need to register with NVIDIA to download the cuDNN library.
 
 **Note:** Make sure to add CUDA install path to `LD_LIBRARY_PATH`.
 
@@ -1077,7 +1075,7 @@ Clone the MXNet source code repository using the following ```git``` command in
 Edit the Makefile to install the MXNet with CUDA bindings to leverage the GPU on the Jetson:
 ```bash
     cp make/config.mk .
-    echo "USE_CUDA=1" >> config.mk    
+    echo "USE_CUDA=1" >> config.mk
     echo "USE_CUDA_PATH=/usr/local/cuda" >> config.mk
     echo "USE_CUDNN=1" >> config.mk
 ```
@@ -1110,7 +1108,7 @@ Add the mxnet folder to the path:
 
 ```bash
     cd ..
-    export MXNET_HOME=$(pwd)                       
+    export MXNET_HOME=$(pwd)
     echo "export PYTHONPATH=$MXNET_HOME/python:$PYTHONPATH" >> ~/.bashrc
     source ~/.bashrc
 ```
@@ -1458,15 +1456,13 @@ Will be available soon.
   </div>
     <div class="gpu">
 
-The following installation instructions have been tested on Ubuntu 14.04 and 16.04.
-
 
 **Prerequisites**
 
 Install the following NVIDIA libraries to setup *MXNet* with GPU support:
 
-1. Install CUDA 8.0 following the NVIDIA's [installation guide](http://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows).
-2. Install cuDNN 7 for CUDA 8.0 following the NVIDIA's [installation guide](https://developer.nvidia.com/cudnn). You may need to register with NVIDIA for downloading the cuDNN library.
+1. Install CUDA 9.0 following NVIDIA's [installation guide](http://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows).
+2. Install cuDNN 7 for CUDA 9.0 following NVIDIA's [installation guide](https://developer.nvidia.com/cudnn). You may need to register with NVIDIA to download the cuDNN library.
 
 **Note:** Make sure to add CUDA install path to `PATH`.
 
@@ -1477,10 +1473,10 @@ Install the following NVIDIA libraries to setup *MXNet* with GPU support:
 
 Recommend install ```Anaconda3``` [here](https://www.anaconda.com/download/)
 
-**Step 2**  Install *MXNet* with GPU support using CUDA 8.0
+**Step 2**  Install *MXNet* with GPU support using CUDA 9.0
 
 ```bash
-$ pip install mxnet-cu80
+$ pip install mxnet-cu90
 ```
 
 </div>
diff --git a/versions/master/_sources/install/windows_setup.txt b/versions/master/_sources/install/windows_setup.txt
index bf1673a..e5e92a7 100644
--- a/versions/master/_sources/install/windows_setup.txt
+++ b/versions/master/_sources/install/windows_setup.txt
@@ -25,16 +25,28 @@ To build and install MXNet yourself, you need the following dependencies. Instal
 2. Download and Install [CMake](https://cmake.org/) if it is not already installed.
 3. Download and install [OpenCV](http://sourceforge.net/projects/opencvlibrary/files/opencv-win/3.0.0/opencv-3.0.0.exe/download).
 4. Unzip the OpenCV package.
-5. Set the environment variable ```OpenCV_DIR``` to point to the ```OpenCV build directory```.
-6. If you don't have the Intel Math Kernel Library (MKL) installed, download and install [OpenBlas](http://sourceforge.net/projects/openblas/files/v0.2.14/).
-7. Set the environment variable ```OpenBLAS_HOME``` to point to the ```OpenBLAS``` directory that contains the ```include``` and ```lib``` directories. Typically, you can find the directory in ```C:\Program files (x86)\OpenBLAS\```.
-8. Download and install [CuDNN](https://developer.nvidia.com/cudnn). To get access to the download link, register as an NVIDIA community user.
+5. Set the environment variable ```OpenCV_DIR``` to point to the ```OpenCV build directory``` (```c:\utils\opencv\build``` for example).
+6. If you have the Intel Math Kernel Library (MKL) installed, set ```MKL_ROOT``` to point to the ```MKL``` directory that contains the ```include``` and ```lib``` directories. Typically, you can find the directory in
+```C:\Program Files (x86)\IntelSWTools\compilers_and_libraries_2018\windows\mkl```.
+7. If you don't have the Intel Math Kernel Library (MKL) installed, download and install [OpenBlas](http://sourceforge.net/projects/openblas/files/v0.2.14/).
+8. Set the environment variable ```OpenBLAS_HOME``` to point to the ```OpenBLAS``` directory that contains the ```include``` and ```lib``` directories. Typically, you can find the directory in ```C:\Program files (x86)\OpenBLAS\```.
+9. Download and install [CuDNN](https://developer.nvidia.com/cudnn). To get access to the download link, register as an NVIDIA community user.
 
 After you have installed all of the required dependencies, build the MXNet source code:
 
-1. Download the MXNet source code from [GitHub](https://github.com/dmlc/mxnet).
-2. Use [CMake](https://cmake.org/) to create a Visual Studio solution in ```./build```.
-3. In Visual Studio, open the solution file,```.sln```, and compile it.
+1. Download the MXNet source code from [GitHub](https://github.com/dmlc/mxnet). Don't forget to pull the submodules:
+```
+    git clone https://github.com/apache/incubator-mxnet.git ~/mxnet --recursive
+```
+2. Start a Visual Studio command prompt.
+3. Use [CMake](https://cmake.org/) to create a Visual Studio solution in ```./build``` or some other directory. Make sure to specify the architecture in the 
+[CMake](https://cmake.org/) command:
+```
+    mkdir build
+    cd build
+    cmake -G "Visual Studio 14 Win64" ..
+```
+4. In Visual Studio, open the solution file, ```.sln```, and compile it.
 These commands produce a library called ```mxnet.dll``` in the ```./build/Release/``` or ```./build/Debug``` folder.
 
 
diff --git a/versions/master/_sources/tutorials/basic/image_io.txt b/versions/master/_sources/tutorials/basic/image_io.txt
index b017c9f..e643425 100644
--- a/versions/master/_sources/tutorials/basic/image_io.txt
+++ b/versions/master/_sources/tutorials/basic/image_io.txt
@@ -7,9 +7,9 @@ iterators to process image data.
 
 There are mainly three ways of loading image data in MXNet:
 
-- [NEW] `mx.img.ImageIter`: implemented in python, easily customizable, can load
+- [NEW] [mx.img.ImageIter](https://mxnet.incubator.apache.org/versions/master/api/python/image/image.html#mxnet.image.ImageIter): implemented in python, easily customizable, can load
   from both .rec files and raw image files.
-- [OLD] `mx.io.ImageRecordIter`: implemented in backend (C++), less customizable
+- [OLD] [mx.io.ImageRecordIter](https://mxnet.incubator.apache.org/versions/master/api/python/io.html#mxnet.io.ImageRecordIter): implemented in backend (C++), less customizable
   but can be used in all language bindings, load from .rec files
 - Custom iterator by inheriting mx.io.DataIter
 
@@ -17,7 +17,7 @@ First, we explain the record io file format used by mxnet:
 
 ## RecordIO
 
-Record IO is the main file format used by MXNet for data IO. It supports reading
+[Record IO](https://mxnet.incubator.apache.org/architecture/note_data_loading.html#data-format) is the main file format used by MXNet for data IO. It supports reading
 and writing on various file systems including distributed file systems like
 Hadoop HDFS and AWS S3.  First, we download the Caltech 101 dataset that
 contains 101 classes of objects and convert them into record io format:
@@ -34,7 +34,7 @@ import matplotlib.pyplot as plt
 MXNET_HOME = '/scratch/mxnet'
 ```
 
-Download and unzip:
+Download and unzip the dataset. The dataset is about 126 MB, so the download may take some time:
 
 ```python
 os.system('wget http://www.vision.caltech.edu/Image_Datasets/Caltech101/101_ObjectCategories.tar.gz -P data/')
@@ -43,15 +43,18 @@ os.system('tar -xf 101_ObjectCategories.tar.gz')
 os.chdir('../')
 ```
 
-Let's take a look at the data. As you can see, under the
-[root folder](./data/101_ObjectCategories) every category has a
-[subfolder](./data/101_ObjectCategories/yin_yang).
+Let's take a look at the data. 
+
+As you can see, under the
+root folder (data/101_ObjectCategories) every category has a
+subfolder (e.g. data/101_ObjectCategories/yin_yang).
 
 Now let's convert them into record io format. First we need to make a list that
 contains all the image files and their categories:
 
 
 ```python
+assert(MXNET_HOME != '/scratch/mxnet'), "Please update your MXNet location"
 os.system('python %s/tools/im2rec.py --list=1 --recursive=1 --shuffle=1 --test-ratio=0.2 data/caltech data/101_ObjectCategories'%MXNET_HOME)
 ```
 
@@ -66,7 +69,7 @@ Then we can use this list to create our record io file:
 os.system("python %s/tools/im2rec.py --num-thread=4 --pass-through=1 data/caltech data/101_ObjectCategories"%MXNET_HOME)
 ```
 
-The record io files are now saved at [here](./data)
+The record io files are now saved in the "data" directory.
 
 ## ImageRecordIter
 
diff --git a/versions/master/_sources/tutorials/basic/module.txt b/versions/master/_sources/tutorials/basic/module.txt
index e0618ca..48f9086 100644
--- a/versions/master/_sources/tutorials/basic/module.txt
+++ b/versions/master/_sources/tutorials/basic/module.txt
@@ -1,3 +1,4 @@
+
 # Module - Neural network training and inference
 
 Training a neural network involves quite a few steps. One need to specify how
@@ -35,6 +36,7 @@ The following code downloads the dataset and creates an 80:20 train:test
 split. It also initializes a training data iterator to return a batch of 32
 training examples each time. A separate iterator is also created for test data.
 
+
 ```python
 import logging
 logging.getLogger().setLevel(logging.INFO)
@@ -51,8 +53,10 @@ train_iter = mx.io.NDArrayIter(data[:ntrain, :], label[:ntrain], batch_size, shu
 val_iter = mx.io.NDArrayIter(data[ntrain:, :], label[ntrain:], batch_size)
 ```
 
+
 Next, we define the network.
 
+
 ```python
 net = mx.sym.Variable('data')
 net = mx.sym.FullyConnected(net, name='fc1', num_hidden=64)
@@ -62,6 +66,13 @@ net = mx.sym.SoftmaxOutput(net, name='softmax')
 mx.viz.plot_network(net)
 ```
 
+
+
+
+![svg](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/doc/tutorials/basic/module/output_3_0.svg?sanitize=true)
+
+
+
 ## Creating a Module
 
 Now we are ready to introduce module. The commonly used module class is
@@ -75,6 +86,7 @@ Now we are ready to introduce module. The commonly used module class is
 For `net`, we have only one data named `data`, and one label named `softmax_label`,
 which is automatically named for us following the name `softmax` we specified for the `SoftmaxOutput` operator.
 
+
 ```python
 mod = mx.mod.Module(symbol=net,
                     context=mx.cpu(),
@@ -100,6 +112,7 @@ To train a module, we need to perform following steps:
 
 This can be used as follows:
 
+
 ```python
 # allocate memory given the input data and label shapes
 mod.bind(data_shapes=train_iter.provide_data, label_shapes=train_iter.provide_label)
@@ -121,6 +134,13 @@ for epoch in range(5):
     print('Epoch %d, Training %s' % (epoch, metric.get()))
 ```
 
+    Epoch 0, Training ('accuracy', 0.4554375)
+    Epoch 1, Training ('accuracy', 0.6485625)
+    Epoch 2, Training ('accuracy', 0.7055625)
+    Epoch 3, Training ('accuracy', 0.7396875)
+    Epoch 4, Training ('accuracy', 0.764375)
+
+
 To learn more about these APIs, visit [Module API](http://mxnet.io/api/python/module.html).
 
 ## High-level Interface
@@ -134,6 +154,7 @@ and it internally executes the same steps.
 
 To fit a module, call the `fit` function as follows:
 
+
 ```python
 # reset train_iter to the beginning
 train_iter.reset()
@@ -153,6 +174,32 @@ mod.fit(train_iter,
         num_epoch=8)
 ```
 
+    INFO:root:Epoch[0] Train-accuracy=0.364625
+    INFO:root:Epoch[0] Time cost=0.388
+    INFO:root:Epoch[0] Validation-accuracy=0.557250
+    INFO:root:Epoch[1] Train-accuracy=0.633625
+    INFO:root:Epoch[1] Time cost=0.470
+    INFO:root:Epoch[1] Validation-accuracy=0.634750
+    INFO:root:Epoch[2] Train-accuracy=0.697187
+    INFO:root:Epoch[2] Time cost=0.402
+    INFO:root:Epoch[2] Validation-accuracy=0.665500
+    INFO:root:Epoch[3] Train-accuracy=0.735062
+    INFO:root:Epoch[3] Time cost=0.402
+    INFO:root:Epoch[3] Validation-accuracy=0.713000
+    INFO:root:Epoch[4] Train-accuracy=0.762563
+    INFO:root:Epoch[4] Time cost=0.408
+    INFO:root:Epoch[4] Validation-accuracy=0.742000
+    INFO:root:Epoch[5] Train-accuracy=0.782312
+    INFO:root:Epoch[5] Time cost=0.400
+    INFO:root:Epoch[5] Validation-accuracy=0.778500
+    INFO:root:Epoch[6] Train-accuracy=0.797188
+    INFO:root:Epoch[6] Time cost=0.392
+    INFO:root:Epoch[6] Validation-accuracy=0.798250
+    INFO:root:Epoch[7] Train-accuracy=0.807750
+    INFO:root:Epoch[7] Time cost=0.401
+    INFO:root:Epoch[7] Validation-accuracy=0.789250
+
+
 By default, `fit` function has `eval_metric` set to `accuracy`, `optimizer` to `sgd`
 and optimizer_params to `(('learning_rate', 0.01),)`.
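
These defaults can be overridden by passing the corresponding arguments to `fit`. Below is a small sketch, not part of the original tutorial; it reuses `net`, `train_iter` and `val_iter` from above, and the metric, optimizer and epoch count are arbitrary illustrative choices:

```python
# Illustrative only: override the default metric, optimizer and learning rate.
# A fresh module is used here so the `mod` trained above is left untouched.
train_iter.reset()   # rewind the iterator before training again
mod2 = mx.mod.Module(symbol=net, context=mx.cpu())
mod2.fit(train_iter,
         eval_data=val_iter,
         eval_metric='ce',                           # cross-entropy instead of accuracy
         optimizer='adam',                           # instead of the default 'sgd'
         optimizer_params={'learning_rate': 0.001},
         num_epoch=2)
```
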
 
@@ -161,6 +208,7 @@ and optimizer_params to `(('learning_rate', 0.01),)`.
 To predict with module, we can call `predict()`. It will collect and
 return all the prediction results.
 
+
 ```python
 y = mod.predict(val_iter)
 assert y.shape == (4000, 26)
@@ -172,11 +220,15 @@ dataset and evaluates the performance according to the given input metric.
 
 It can be used as follows:
 
+
 ```python
 score = mod.score(val_iter, ['acc'])
 print("Accuracy score is %f" % (score[0][1]))
 ```
 
+    Accuracy score is 0.789250
+
+
 Some of the other metrics which can be used are `top_k_acc`(top-k-accuracy),
 `F1`, `RMSE`, `MSE`, `MAE`, `ce`(CrossEntropy). To learn more about the metrics,
 visit [Evaluation metric](http://mxnet.io/api/python/metric.html).
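
For example, a small sketch (not from the original tutorial) of scoring the same validation iterator with top-k accuracy instead of plain accuracy:

```python
# Illustrative only: evaluate top-3 accuracy with the module trained above.
top3_metric = mx.metric.TopKAccuracy(top_k=3)
print(mod.score(val_iter, top3_metric))
```
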
@@ -188,6 +240,7 @@ and tune these parameters to get best score.
 
 We can save the module parameters after each training epoch by using a checkpoint callback.
 
+
 ```python
 # construct a callback function to save checkpoints
 model_prefix = 'mx_mlp'
@@ -197,10 +250,28 @@ mod = mx.mod.Module(symbol=net)
 mod.fit(train_iter, num_epoch=5, epoch_end_callback=checkpoint)
 ```
 
+    INFO:root:Epoch[0] Train-accuracy=0.101062
+    INFO:root:Epoch[0] Time cost=0.422
+    INFO:root:Saved checkpoint to "mx_mlp-0001.params"
+    INFO:root:Epoch[1] Train-accuracy=0.263313
+    INFO:root:Epoch[1] Time cost=0.785
+    INFO:root:Saved checkpoint to "mx_mlp-0002.params"
+    INFO:root:Epoch[2] Train-accuracy=0.452188
+    INFO:root:Epoch[2] Time cost=0.624
+    INFO:root:Saved checkpoint to "mx_mlp-0003.params"
+    INFO:root:Epoch[3] Train-accuracy=0.544125
+    INFO:root:Epoch[3] Time cost=0.427
+    INFO:root:Saved checkpoint to "mx_mlp-0004.params"
+    INFO:root:Epoch[4] Train-accuracy=0.605250
+    INFO:root:Epoch[4] Time cost=0.399
+    INFO:root:Saved checkpoint to "mx_mlp-0005.params"
+
+
 To load the saved module parameters, call the `load_checkpoint` function. It
 loads the Symbol and the associated parameters. We can then set the loaded
 parameters into the module.
 
+
 ```python
 sym, arg_params, aux_params = mx.model.load_checkpoint(model_prefix, 3)
 assert sym.tojson() == net.tojson()
@@ -215,6 +286,7 @@ parameters, so that `fit()` knows to start from those parameters instead of
 initializing randomly from scratch. We also set the `begin_epoch` parameter so that
 `fit()` knows we are resuming from a previously saved epoch.
 
+
 ```python
 mod = mx.mod.Module(symbol=sym)
 mod.fit(train_iter,
@@ -224,4 +296,19 @@ mod.fit(train_iter,
         begin_epoch=3)
 ```
 
+    INFO:root:Epoch[3] Train-accuracy=0.544125
+    INFO:root:Epoch[3] Time cost=0.398
+    INFO:root:Epoch[4] Train-accuracy=0.605250
+    INFO:root:Epoch[4] Time cost=0.545
+    INFO:root:Epoch[5] Train-accuracy=0.644312
+    INFO:root:Epoch[5] Time cost=0.592
+    INFO:root:Epoch[6] Train-accuracy=0.675000
+    INFO:root:Epoch[6] Time cost=0.491
+    INFO:root:Epoch[7] Train-accuracy=0.695812
+    INFO:root:Epoch[7] Time cost=0.363
+
+
+
 <!-- INSERT SOURCE DOWNLOAD BUTTONS -->
+
+
diff --git a/versions/master/_sources/tutorials/c++/basics.txt b/versions/master/_sources/tutorials/c++/basics.txt
index cdf1a28..d3231e7 100644
--- a/versions/master/_sources/tutorials/c++/basics.txt
+++ b/versions/master/_sources/tutorials/c++/basics.txt
@@ -16,8 +16,8 @@ Except linking the MXNet shared library, the C++ package itself is a header-only
 which means all you need to do is to include the header files. Among the header files,
 `op.h` is special since it is generated dynamically. The generation should be done when
 [building the C++ package](http://mxnet.io/get_started/build_from_source.html#build-the-c++-package).
-After that, you also need to copy the shared library (`libmxnet.so` in linux,
-`libmxnet.dll` in windows) from `/path/to/mxnet/lib` to the working directory.
+It is important to note that you need to **copy the shared library** (`libmxnet.so` in Linux and MacOS,
+`libmxnet.dll` in Windows) from `/path/to/mxnet/lib` to the working directory.
 We do not recommend using pre-built binaries because MXNet is under heavy development;
 the operator definitions in `op.h` may be incompatible with the pre-built version.
 
@@ -49,7 +49,7 @@ auto val_iter = MXDataIter("MNISTIter")
     .CreateDataIter();
 ```
 
-The data have been successfully loaded, we can now easily construct various models to identify
+The data have been successfully loaded. We can now easily construct various models to identify
 the digits with the help of the C++ package.
 
 
@@ -159,7 +159,12 @@ while (val_iter.Next()) {
 ```
 
 You can find the complete code in `mlp_cpu.cpp`. Use `make mlp_cpu` to compile it,
- and `./mlp_cpu` to run it.
+ and `./mlp_cpu` to run it. If the program complains that the shared library `libmxnet.so` cannot be found
+ when you run `./mlp_cpu`, you need to add the directory containing the shared library to
+ the environment variable `LD_LIBRARY_PATH` on Linux or `DYLD_LIBRARY_PATH`
+ on MacOS. For example, on MacOS, running
+ `DYLD_LIBRARY_PATH+=. ./mlp_cpu` solves the problem: it tells the system to look for
+ the shared library in the current directory, since we have just copied it there.
 
 GPU Support
 -----------
@@ -186,4 +191,6 @@ data_batch.label.CopyTo(&args["label"]);
 NDArray::WaitAll();
 ```
 
-By replacing the former code to the latter one, we successfully port the code to GPU. You can find the complete code in `mlp_gpu.cpp`. Compilation is similar to the cpu version. (Note: The shared library should be built with GPU support on)
+By replacing the former code with the latter, we successfully port the code to GPU.
+You can find the complete code in `mlp_gpu.cpp`. Compilation is similar to the cpu version.
+Note that the shared library must be built with GPU support enabled.
diff --git a/versions/master/_sources/tutorials/gluon/mnist.txt b/versions/master/_sources/tutorials/gluon/mnist.txt
index 0abb8ea..ce23f1f 100644
--- a/versions/master/_sources/tutorials/gluon/mnist.txt
+++ b/versions/master/_sources/tutorials/gluon/mnist.txt
@@ -66,7 +66,7 @@ from mxnet import autograd as ag
 
 The first approach makes use of a [Multilayer Perceptron](https://en.wikipedia.org/wiki/Multilayer_perceptron) to solve this problem. We'll define the MLP using MXNet's imperative approach.
 
-MLPs contains several fully connected layers. A fully connected layer or FC layer for short, is one where each neuron in the layer is connected to every neuron in its preceding layer. From a linear algebra perspective, an FC layer applies an [affine transform](https://en.wikipedia.org/wiki/Affine_transformation) to the *n x m* input matrix *X* and outputs a matrix *Y* of size *n x k*, where *k* is the number of neurons in the FC layer. *k* is also referred to as the hidden size. The outp [...]
+MLPs consist of several fully connected layers. A fully connected layer or FC layer for short, is one where each neuron in the layer is connected to every neuron in its preceding layer. From a linear algebra perspective, an FC layer applies an [affine transform](https://en.wikipedia.org/wiki/Affine_transformation) to the *n x m* input matrix *X* and outputs a matrix *Y* of size *n x k*, where *k* is the number of neurons in the FC layer. *k* is also referred to as the hidden size. The ou [...]
 
 In an MLP, the outputs of most FC layers are fed into an activation function, which applies an element-wise non-linearity. This step is critical and it gives neural networks the ability to classify inputs that are not linearly separable. Common choices for activation functions are sigmoid, tanh, and [rectified linear unit](https://en.wikipedia.org/wiki/Rectifier_%28neural_networks%29) (ReLU). In this example, we'll use the ReLU activation function which has several desirable properties a [...]
 
diff --git a/versions/master/_sources/tutorials/index.txt b/versions/master/_sources/tutorials/index.txt
index d20a821..d691ecc 100644
--- a/versions/master/_sources/tutorials/index.txt
+++ b/versions/master/_sources/tutorials/index.txt
@@ -5,7 +5,7 @@
 Gluon is the high-level interface for MXNet. It is more intuitive and easier to use than the lower level interface.
 Gluon supports dynamic (define-by-run) graphs with JIT-compilation to achieve both flexibility and efficiency.
 
-This is a selected subset of Gluon tutorials that explains basic usage of Gluon and fundamental concepts in deep learning. For the comprehensive tutorial on Gluon that covers topics from basic statistics and probability theory to reinforcement learning and recommender systems, please see [gluon.mxnet.io](http://gluon.mxnet.io). 
+This is a selected subset of Gluon tutorials that explain basic usage of Gluon and fundamental concepts in deep learning. For a comprehensive tutorial on Gluon that covers topics from basic statistics and probability theory to reinforcement learning and recommender systems, please see [gluon.mxnet.io](http://gluon.mxnet.io).
 
 ### Basics
 
@@ -67,6 +67,15 @@ These tutorials introduce a few fundamental concepts in deep learning and how to
    sparse/train
 ```
 
+### Advanced Neural Networks
+
+```eval_rst
+.. toctree::
+   :maxdepth: 1
+
+   unsupervised_learning/gan
+```
+
 <br>
 More tutorials and examples are available in the GitHub [repository](https://github.com/dmlc/mxnet/tree/master/example).
 
diff --git a/versions/master/_sources/tutorials/nlp/cnn.txt b/versions/master/_sources/tutorials/nlp/cnn.txt
index 23f74c4..7f56b76 100644
--- a/versions/master/_sources/tutorials/nlp/cnn.txt
+++ b/versions/master/_sources/tutorials/nlp/cnn.txt
@@ -1,6 +1,6 @@
 # Text Classification Using a Convolutional Neural Network on MXNet
 
-This tutorial is based of Yoon Kim's [paper](https://arxiv.org/abs/1408.5882) on using convolutional neural networks for sentence sentiment classification.
+This tutorial is based on Yoon Kim's [paper](https://arxiv.org/abs/1408.5882) on using convolutional neural networks for sentence sentiment classification. The tutorial has been tested on MXNet 1.0 running under Python 2.7 and Python 3.6.
 
 For this tutorial, we will train a convolutional deep network model on movie review sentences from Rotten Tomatoes labeled with their sentiment. The result will be a model that can classify a sentence based on its sentiment (with 1 being a purely positive sentiment, 0 being a purely negative sentiment and 0.5 being neutral).
 
@@ -8,16 +8,24 @@ Our first step will be to fetch the labeled training data of positive and negati
 
 
 ```python
-import urllib2
+from __future__ import print_function
+
+from collections import Counter
+import itertools
 import numpy as np
 import re
-import itertools
-from collections import Counter
 
+try:
+    # For Python 3.0 and later
+    from urllib.request import urlopen
+except ImportError:
+    # Fall back to Python 2's urllib2
+    from urllib2 import urlopen
+    
 def clean_str(string):
     """
-    Tokenization/string cleaning for all datasets except for SST.
-    Original taken from https://github.com/yoonkim/CNN_sentence/blob/master/process_data.py
+    Tokenization/string cleaning.
+    Original from https://github.com/yoonkim/CNN_sentence/blob/master/process_data.py
     """
     string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string)
     string = re.sub(r"\'s", " \'s", string)
@@ -32,38 +40,42 @@ def clean_str(string):
     string = re.sub(r"\)", " \) ", string)
     string = re.sub(r"\?", " \? ", string)
     string = re.sub(r"\s{2,}", " ", string)
+    
     return string.strip().lower()
 
+def download_sentences(url):
+    """
+    Download sentences from specified URL. 
+    
+    Strip trailing newline, convert to Unicode.
+    """
+    
+    remote_file = urlopen(url)
+    return [line.decode('Latin1').strip() for line in remote_file.readlines()]
+    
 def load_data_and_labels():
     """
-    Loads MR polarity data from files, splits the data into words and generates labels.
+    Loads polarity data from files, splits the data into words and generates labels.
     Returns split sentences and labels.
     """
-    # Pull sentences with positive sentiment
-    pos_file = urllib2.urlopen('https://raw.githubusercontent.com/yoonkim/CNN_sentence/master/rt-polarity.pos')
-
-    # Pull sentences with negative sentiment
-    neg_file = urllib2.urlopen('https://raw.githubusercontent.com/yoonkim/CNN_sentence/master/rt-polarity.neg')
-
-    # Load data from files
-    positive_examples = list(pos_file.readlines())
-    positive_examples = [s.strip() for s in positive_examples]
-    negative_examples = list(neg_file.readlines())
-    negative_examples = [s.strip() for s in negative_examples]
-    # Split by words
+
+    positive_examples = download_sentences('https://raw.githubusercontent.com/yoonkim/CNN_sentence/master/rt-polarity.pos')
+    negative_examples = download_sentences('https://raw.githubusercontent.com/yoonkim/CNN_sentence/master/rt-polarity.neg')
+    
+    # Tokenize
     x_text = positive_examples + negative_examples
-    x_text = [clean_str(sent) for sent in x_text]
-    x_text = [s.split(" ") for s in x_text]
+    x_text = [clean_str(sent).split(" ") for sent in x_text]
+
     # Generate labels
     positive_labels = [1 for _ in positive_examples]
     negative_labels = [0 for _ in negative_examples]
     y = np.concatenate([positive_labels, negative_labels], 0)
-    return [x_text, y]
+    return x_text, y
 
 
 def pad_sentences(sentences, padding_word="</s>"):
     """
-    Pads all sentences to the same length. The length is defined by the longest sentence.
+    Pads all sentences to be the length of the longest sentence.
     Returns padded sentences.
     """
     sequence_length = max(len(x) for x in sentences)
@@ -73,33 +85,40 @@ def pad_sentences(sentences, padding_word="</s>"):
         num_padding = sequence_length - len(sentence)
         new_sentence = sentence + [padding_word] * num_padding
         padded_sentences.append(new_sentence)
+        
     return padded_sentences
 
 
 def build_vocab(sentences):
     """
-    Builds a vocabulary mapping from word to index based on the sentences.
+    Builds a vocabulary mapping from token to index based on the sentences.
     Returns vocabulary mapping and inverse vocabulary mapping.
     """
     # Build vocabulary
     word_counts = Counter(itertools.chain(*sentences))
+    
     # Mapping from index to word
     vocabulary_inv = [x[0] for x in word_counts.most_common()]
+    
     # Mapping from word to index
     vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
-    return [vocabulary, vocabulary_inv]
+    
+    return vocabulary, vocabulary_inv
 
 
 def build_input_data(sentences, labels, vocabulary):
     """
     Maps sentences and labels to vectors based on a vocabulary.
     """
-    x = np.array([[vocabulary[word] for word in sentence] for sentence in sentences])
+    x = np.array([
+            [vocabulary[word] for word in sentence]
+            for sentence in sentences])
     y = np.array(labels)
-    return [x, y]
+    
+    return x, y
 
 """
-Loads and preprocessed data for the MR dataset.
+Loads and preprocesses data for the MR dataset.
 Returns input vectors, labels, vocabulary, and inverse vocabulary.
 """
 # Load and preprocess data
@@ -123,11 +142,11 @@ y_train, y_dev = y_shuffled[:-1000], y_shuffled[-1000:]
 
 sentence_size = x_train.shape[1]
 
-print 'Train/Dev split: %d/%d' % (len(y_train), len(y_dev))
-print 'train shape:', x_train.shape
-print 'dev shape:', x_dev.shape
-print 'vocab_size', vocab_size
-print 'sentence max words', sentence_size
+print('Train/Dev split: %d/%d' % (len(y_train), len(y_dev)))
+print('train shape:', x_train.shape)
+print('dev shape:', x_dev.shape)
+print('vocab_size', vocab_size)
+print('sentence max words', sentence_size)
 ```
 
     Train/Dev split: 9662/1000
@@ -150,8 +169,8 @@ import sys,os
 Define batch size and the place holders for network inputs and outputs
 '''
 
-batch_size = 50 # the size of batches to train network with
-print 'batch size', batch_size
+batch_size = 50
+print('batch size', batch_size)
 
 input_x = mx.sym.Variable('data') # placeholder for input data
 input_y = mx.sym.Variable('softmax_label') # placeholder for output label
@@ -163,7 +182,7 @@ Define the first network layer (embedding)
 
 # create embedding layer to learn representation of words in a lower dimensional subspace (much like word2vec)
 num_embed = 300 # dimensions to embed words into
-print 'embedding dimensions', num_embed
+print('embedding dimensions', num_embed)
 
 embed_layer = mx.sym.Embedding(data=input_x, input_dim=vocab_size, output_dim=num_embed, name='vocab_embed')
 
@@ -185,14 +204,14 @@ Because each convolution+pool filter produces tensors of different shapes we nee
 ```python
 # create convolution + (max) pooling layer for each filter operation
 filter_list=[3, 4, 5] # the size of filters to use
-print 'convolution filters', filter_list
+print('convolution filters', filter_list)
 
 num_filter=100
 pooled_outputs = []
-for i, filter_size in enumerate(filter_list):
+for filter_size in filter_list:
     convi = mx.sym.Convolution(data=conv_input, kernel=(filter_size, num_embed), num_filter=num_filter)
     relui = mx.sym.Activation(data=convi, act_type='relu')
-    pooli = mx.sym.Pooling(data=relui, pool_type='max', kernel=(sentence_size - filter_size + 1, 1), stride=(1,1))
+    pooli = mx.sym.Pooling(data=relui, pool_type='max', kernel=(sentence_size - filter_size + 1, 1), stride=(1, 1))
     pooled_outputs.append(pooli)
 
 # combine all pooled outputs
@@ -206,14 +225,14 @@ h_pool = mx.sym.Reshape(data=concat, target_shape=(batch_size, total_filters))
     convolution filters [3, 4, 5]
 
 
-Next, we add dropout regularization, which will randomly disable a fraction of neurons in the layer (set to 50% here) to ensure that that model does not overfit. This works by preventing neurons from co-adapting and forcing them to learn individually useful features.
+Next, we add dropout regularization, which will randomly disable a fraction of neurons in the layer (set to 50% here) to ensure that the model does not overfit. This prevents neurons from co-adapting and forces them to learn individually useful features.
 
 This is necessary for our model because the dataset has a vocabulary of around 20k words but only around 10k examples; with such a small dataset, a powerful model like this neural net is likely to overfit.
 
 
 ```python
 # dropout layer
-dropout=0.5
+dropout = 0.5
-print 'dropout probability', dropout
+print('dropout probability', dropout)
 
 if dropout > 0.0:
@@ -231,7 +250,7 @@ Finally, we add a fully connected layer to add non-linearity to the model. We th
 
 ```python
 # fully connected layer
-num_label=2
+num_label = 2
 
 cls_weight = mx.sym.Variable('cls_weight')
 cls_bias = mx.sym.Variable('cls_bias')
@@ -252,16 +271,16 @@ Now that we have defined our CNN model we will define the device on our machine
 
 ```python
 from collections import namedtuple
-import time
 import math
+import time
 
 # Define the structure of our CNN Model (as a named tuple)
 CNNModel = namedtuple("CNNModel", ['cnn_exec', 'symbol', 'data', 'label', 'param_blocks'])
 
 # Define what device to train/test on
-ctx=mx.gpu(0)
+ctx = mx.gpu(0)
 # If you have no GPU on your machine change this to
-# ctx=mx.cpu(0)
+# ctx = mx.cpu(0)
 
 arg_names = cnn.list_arguments()
 
@@ -280,16 +299,14 @@ cnn_exec = cnn.bind(ctx=ctx, args=arg_arrays, args_grad=args_grad, grad_req='add
 
 param_blocks = []
 arg_dict = dict(zip(arg_names, cnn_exec.arg_arrays))
-initializer=mx.initializer.Uniform(0.1)
+initializer = mx.initializer.Uniform(0.1)
 for i, name in enumerate(arg_names):
     if name in ['softmax_label', 'data']: # input, output
         continue
-    initializer(name, arg_dict[name])
+    initializer(mx.init.InitDesc(name), arg_dict[name])
 
     param_blocks.append( (i, arg_dict[name], args_grad[name], name) )
 
-out_dict = dict(zip(cnn.list_outputs(), cnn_exec.outputs))
-
 data = cnn_exec.arg_dict['data']
 label = cnn_exec.arg_dict['softmax_label']
 
@@ -304,15 +321,15 @@ We can now execute the training and testing of our network, which in-part mxnet
 Train the cnn_model using back prop
 '''
 
-optimizer='rmsprop'
-max_grad_norm=5.0
-learning_rate=0.0005
-epoch=50
+optimizer = 'rmsprop'
+max_grad_norm = 5.0
+learning_rate = 0.0005
+epoch = 50
 
-print 'optimizer', optimizer
-print 'maximum gradient', max_grad_norm
-print 'learning rate (step size)', learning_rate
-print 'epochs to train for', epoch
+print('optimizer', optimizer)
+print('maximum gradient', max_grad_norm)
+print('learning rate (step size)', learning_rate)
+print('epochs to train for', epoch)
 
 # create optimizer
 opt = mx.optimizer.create(optimizer)
@@ -320,9 +337,6 @@ opt.lr = learning_rate
 
 updater = mx.optimizer.get_updater(opt)
 
-# create logging output
-logs = sys.stderr
-
 # For each training epoch
 for iteration in range(epoch):
     tic = time.time()
@@ -369,7 +383,7 @@ for iteration in range(epoch):
     # Decay learning rate for this epoch to ensure we are not "overshooting" optima
     if iteration % 50 == 0 and iteration > 0:
         opt.lr *= 0.5
-        print >> logs, 'reset learning rate to %g' % opt.lr
+        print('reset learning rate to %g' % opt.lr)
 
     # End of training loop for this epoch
     toc = time.time()
@@ -380,11 +394,11 @@ for iteration in range(epoch):
     if (iteration + 1) % 10 == 0:
         prefix = 'cnn'
         cnn_model.symbol.save('./%s-symbol.json' % prefix)
-        save_dict = {('arg:%s' % k) :v  for k, v in cnn_model.cnn_exec.arg_dict.items()}
+        save_dict = {('arg:%s' % k) : v  for k, v in cnn_model.cnn_exec.arg_dict.items()}
         save_dict.update({('aux:%s' % k) : v for k, v in cnn_model.cnn_exec.aux_dict.items()})
         param_name = './%s-%04d.params' % (prefix, iteration)
         mx.nd.save(param_name, save_dict)
-        print >> logs, 'Saved checkpoint to %s' % param_name
+        print('Saved checkpoint to %s' % param_name)
 
 
     # Evaluate model after this epoch on dev (test) set
@@ -406,10 +420,28 @@ for iteration in range(epoch):
         num_total += len(batchY)
 
     dev_acc = num_correct * 100 / float(num_total)
-    print >> logs, 'Iter [%d] Train: Time: %.3fs, Training Accuracy: %.3f \
-            --- Dev Accuracy thus far: %.3f' % (iteration, train_time, train_acc, dev_acc)
+    print('Iter [%d] Train: Time: %.3fs, Training Accuracy: %.3f \
+            --- Dev Accuracy thus far: %.3f' % (iteration, train_time, train_acc, dev_acc))
 ```
 
+
+    optimizer rmsprop
+    maximum gradient 5.0
+    learning rate (step size) 0.0005
+    epochs to train for 50
+    Iter [0] Train: Time: 3.903s, Training Accuracy: 56.290             --- Dev Accuracy thus far: 63.300
+    Iter [1] Train: Time: 3.142s, Training Accuracy: 71.917             --- Dev Accuracy thus far: 69.400
+    Iter [2] Train: Time: 3.146s, Training Accuracy: 80.508             --- Dev Accuracy thus far: 73.900
+    Iter [3] Train: Time: 3.142s, Training Accuracy: 87.233             --- Dev Accuracy thus far: 76.300
+    Iter [4] Train: Time: 3.145s, Training Accuracy: 91.057             --- Dev Accuracy thus far: 77.100
+    Iter [5] Train: Time: 3.145s, Training Accuracy: 94.073             --- Dev Accuracy thus far: 77.700
+    Iter [6] Train: Time: 3.147s, Training Accuracy: 96.000             --- Dev Accuracy thus far: 77.400
+    Iter [7] Train: Time: 3.150s, Training Accuracy: 97.399             --- Dev Accuracy thus far: 77.100
+    Iter [8] Train: Time: 3.144s, Training Accuracy: 98.425             --- Dev Accuracy thus far: 78.000
+    Saved checkpoint to ./cnn-0009.params
+    Iter [9] Train: Time: 3.151s, Training Accuracy: 99.192             --- Dev Accuracy thus far: 77.100
+    ...
+
 Now that we have gone through the trouble of training the model, we have stored the learned parameters in the .params file in our local directory. We can now load this file whenever we want and predict the sentiment of new sentences by running them through a forward pass of the trained model.
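
A rough sketch of that last step is shown below. It is not part of the original tutorial: the checkpoint prefix and epoch come from the training log above, the example sentence is made up, and it assumes `vocabulary`, `sentence_size`, `batch_size` and `clean_str` from the earlier code are still in scope.

```python
import mxnet as mx
import numpy as np

# Load the symbol and parameters saved by the checkpointing code above.
sym, arg_params, aux_params = mx.model.load_checkpoint('./cnn', 9)

# 'data' and 'softmax_label' were saved as inputs, not parameters; drop them.
for name in ('data', 'softmax_label'):
    arg_params.pop(name, None)

infer_mod = mx.mod.Module(symbol=sym, context=mx.cpu(), label_names=None)
infer_mod.bind(data_shapes=[('data', (batch_size, sentence_size))], for_training=False)
infer_mod.set_params(arg_params, aux_params)

# Encode a made-up sentence with the training vocabulary, pad it, and fill a batch.
tokens = clean_str("a thoughtful and surprisingly moving film").split(" ")
idx = [vocabulary.get(w, 0) for w in tokens]             # unknown words fall back to index 0 here
idx += [vocabulary["</s>"]] * (sentence_size - len(idx))
batch_data = mx.nd.array(np.tile(idx, (batch_size, 1)))

infer_mod.forward(mx.io.DataBatch(data=[batch_data]), is_train=False)
prob = infer_mod.get_outputs()[0].asnumpy()
print(prob[0])   # approximately [P(negative), P(positive)] for the new sentence
```
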
 
 ## References
diff --git a/versions/master/_sources/tutorials/python/linear-regression.txt b/versions/master/_sources/tutorials/python/linear-regression.txt
index c26435d..fc3e713 100644
--- a/versions/master/_sources/tutorials/python/linear-regression.txt
+++ b/versions/master/_sources/tutorials/python/linear-regression.txt
@@ -156,9 +156,9 @@ parameters of the model to fit the training data. This is accomplished using the
 ```python
 model.fit(train_iter, eval_iter,
             optimizer_params={'learning_rate':0.005, 'momentum': 0.9},
-            num_epoch=50,
+            num_epoch=20,
             eval_metric='mse',
-            batch_end_callback = mx.callback.Speedometer(batch_size, 2))
+            batch_end_callback = mx.callback.Speedometer(batch_size, 2))
 ```
 
 ## Using a trained model: (Testing and Inference)
@@ -176,6 +176,7 @@ evaluating our model's mean squared error (MSE) on the evaluation data.
 ```python
 metric = mx.metric.MSE()
 model.score(eval_iter, metric)
+assert model.score(eval_iter, metric)[0][1] < 0.01001, "Achieved MSE (%f) is larger than expected (0.01001)" % model.score(eval_iter, metric)[0][1]
 ```
 
 Let us try and add some noise to the evaluation data and see how the MSE changes:
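
One way to do that is sketched below. It is not part of the original tutorial and assumes the evaluation arrays were named `eval_data` and `eval_label` earlier in this tutorial, and that the label name `lin_reg_label` matches the one used when the iterators were created; adjust these names to your own code.

```python
import numpy as np
import mxnet as mx

# Illustrative only: perturb the evaluation inputs with Gaussian noise
# and score the trained model again on the noisy iterator.
noisy_eval_data = eval_data + np.random.normal(loc=0.0, scale=0.5, size=eval_data.shape)
noisy_eval_iter = mx.io.NDArrayIter(noisy_eval_data, eval_label, batch_size,
                                    shuffle=False, label_name='lin_reg_label')
print(model.score(noisy_eval_iter, mx.metric.MSE()))
```
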
diff --git a/versions/master/_sources/tutorials/python/mnist.txt b/versions/master/_sources/tutorials/python/mnist.txt
index 4fdf372..fc290e4 100644
--- a/versions/master/_sources/tutorials/python/mnist.txt
+++ b/versions/master/_sources/tutorials/python/mnist.txt
@@ -57,7 +57,7 @@ data = mx.sym.flatten(data=data)
 ```
 One might wonder if we are discarding valuable information by flattening. That is indeed true and we'll cover this more when we talk about convolutional neural networks where we preserve the input shape. For now, we'll go ahead and work with flattened images.
 
-MLPs contains several fully connected layers. A fully connected layer or FC layer for short, is one where each neuron in the layer is connected to every neuron in its preceding layer. From a linear algebra perspective, an FC layer applies an [affine transform](https://en.wikipedia.org/wiki/Affine_transformation) to the *n x m* input matrix *X* and outputs a matrix *Y* of size *n x k*, where *k* is the number of neurons in the FC layer. *k* is also referred to as the hidden size. The outp [...]
+MLPs contains several fully connected layers. A fully connected layer or FC layer for short, is one where each neuron in the layer is connected to every neuron in its preceding layer. From a linear algebra perspective, an FC layer applies an [affine transform](https://en.wikipedia.org/wiki/Affine_transformation) to the *n x m* input matrix *X* and outputs a matrix *Y* of size *n x k*, where *k* is the number of neurons in the FC layer. *k* is also referred to as the hidden size. The outp [...]
 
 
 In an MLP, the outputs of most FC layers are fed into an activation function, which applies an element-wise non-linearity. This step is critical and it gives neural networks the ability to classify inputs that are not linearly separable. Common choices for activation functions are sigmoid, tanh, and [rectified linear unit](https://en.wikipedia.org/wiki/Rectifier_%28neural_networks%29) (ReLU). In this example, we'll use the ReLU activation function which has several desirable properties a [...]
diff --git a/versions/master/_sources/tutorials/sparse/csr.txt b/versions/master/_sources/tutorials/sparse/csr.txt
index 19ab482..f4d7b7d 100644
--- a/versions/master/_sources/tutorials/sparse/csr.txt
+++ b/versions/master/_sources/tutorials/sparse/csr.txt
@@ -84,6 +84,15 @@ a.asnumpy()
 ```
 
 
+
+
+    array([[ 7.,  0.,  8.,  0.],
+           [ 0.,  0.,  0.,  0.],
+           [ 0.,  9.,  0.,  0.]], dtype=float32)
+
+
+
+
 ```python
 import numpy as np
 # Create a CSRNDArray with numpy arrays
@@ -95,11 +104,31 @@ b.asnumpy()
 ```
 
 
+
+
+    array([[7, 0, 8, 0],
+           [0, 0, 0, 0],
+           [0, 9, 0, 0]])
+
+
+
+
 ```python
 # Compare the two. They are exactly the same.
 {'a':a.asnumpy(), 'b':b.asnumpy()}
 ```
 
+
+
+
+    {'a': array([[ 7.,  0.,  8.,  0.],
+            [ 0.,  0.,  0.,  0.],
+            [ 0.,  9.,  0.,  0.]], dtype=float32), 'b': array([[7, 0, 8, 0],
+            [0, 0, 0, 0],
+            [0, 9, 0, 0]])}
+
+
+
 You can create an MXNet CSRNDArray from a `scipy.sparse.csr.csr_matrix` object by using the `array` function:
 
 
@@ -115,8 +144,14 @@ except ImportError:
     print("scipy package is required")
 ```
 
+    d:[[7 0 8 0]
+     [0 0 0 0]
+     [0 9 0 0]]
+
+
 What if you have a big set of data and you haven't calculated indices or indptr yet? Let's try a simple CSRNDArray from an existing array of data and derive those values with some built-in functions. We can mock up a "big" dataset with a random amount of the data being non-zero, then compress it by using the `tostype` function, which is explained further in the [Storage Type Conversion](#storage-type-conversion) section:
 
+
 ```python
 big_array = mx.nd.round(mx.nd.random.uniform(low=0, high=1, shape=(1000, 100)))
 print(big_array)
@@ -130,6 +165,17 @@ data = big_array_csr.data
 # The total size of the `data`, `indices` and `indptr` arrays is much smaller than that of the dense big_array!
 ```
 
+    
+    [[ 1.  1.  0. ...,  0.  1.  1.]
+     [ 0.  0.  0. ...,  0.  0.  1.]
+     [ 1.  0.  0. ...,  1.  0.  0.]
+     ..., 
+     [ 0.  1.  1. ...,  0.  0.  0.]
+     [ 1.  1.  0. ...,  1.  0.  1.]
+     [ 1.  0.  1. ...,  1.  0.  0.]]
+    <NDArray 1000x100 @cpu(0)>
+
+
 You can also create a CSRNDArray from another using the `array` function specifying the element data type with the option `dtype`,
 which accepts a numpy type. By default, `float32` is used.
 
@@ -142,6 +188,13 @@ f = mx.nd.array(a, dtype=np.float16)
 (e.dtype, f.dtype)
 ```
 
+
+
+
+    (numpy.float32, numpy.float16)
+
+
+
 ## Inspecting Arrays
 
 A variety of methods are available for you to use for inspecting CSR arrays:
@@ -158,6 +211,15 @@ its contents into a dense `numpy.ndarray` using the `asnumpy` function.
 a.asnumpy()
 ```
 
+
+
+
+    array([[ 7.,  0.,  8.,  0.],
+           [ 0.,  0.,  0.,  0.],
+           [ 0.,  9.,  0.,  0.]], dtype=float32)
+
+
+
 You can also inspect the internal storage of a CSRNDArray by accessing attributes such as `indptr`, `indices` and `data`:
 
 
@@ -171,6 +233,19 @@ indptr = a.indptr
 {'a.stype': a.stype, 'data':data, 'indices':indices, 'indptr':indptr}
 ```
 
+
+
+
+    {'a.stype': 'csr', 'data': 
+     [ 7.  8.  9.]
+     <NDArray 3 @cpu(0)>, 'indices': 
+     [0 2 1]
+     <NDArray 3 @cpu(0)>, 'indptr': 
+     [0 2 2 3]
+     <NDArray 4 @cpu(0)>}
+
+
+
 ## Storage Type Conversion
 
 You can also convert storage types with:
@@ -190,6 +265,17 @@ dense = csr.tostype('default')
 {'csr':csr, 'dense':dense}
 ```
 
+
+
+
+    {'csr': 
+     <CSRNDArray 2x2 @cpu(0)>, 'dense': 
+     [[ 1.  1.]
+      [ 1.  1.]]
+     <NDArray 2x2 @cpu(0)>}
+
+
+
 To convert the storage type by using the `cast_storage` operator:
 
 
@@ -203,6 +289,17 @@ dense = mx.nd.sparse.cast_storage(csr, 'default')
 {'csr':csr, 'dense':dense}
 ```
 
+
+
+
+    {'csr': 
+     <CSRNDArray 2x2 @cpu(0)>, 'dense': 
+     [[ 1.  1.]
+      [ 1.  1.]]
+     <NDArray 2x2 @cpu(0)>}
+
+
+
 ## Copies
 
 You can use the `copy` method which makes a deep copy of the array and its data, and returns a new array.
@@ -219,6 +316,16 @@ a.copyto(d)
 {'b is a': b is a, 'b.asnumpy()':b.asnumpy(), 'c.asnumpy()':c.asnumpy(), 'd.asnumpy()':d.asnumpy()}
 ```
 
+
+
+
+    {'b is a': False, 'b.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32), 'c.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32), 'd.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32)}
+
+
+
 If the storage types of source array and destination array do not match,
 the storage type of destination array will not change when copying with `copyto` or
 the slice operator `[]`.
@@ -233,6 +340,13 @@ g.copyto(f)
 {'e.stype':e.stype, 'f.stype':f.stype, 'g.stype':g.stype}
 ```
 
+
+
+
+    {'e.stype': 'csr', 'f.stype': 'csr', 'g.stype': 'default'}
+
+
+
 ## Indexing and Slicing
 You can slice a CSRNDArray on axis 0 with operator `[]`, which copies the slices and returns a new CSRNDArray.
 
@@ -244,6 +358,18 @@ c = a[:].asnumpy()
 {'a':a, 'b':b, 'c':c}
 ```
 
+
+
+
+    {'a': 
+     <CSRNDArray 3x2 @cpu(0)>,
+     'b': array([[ 2.,  3.]], dtype=float32),
+     'c': array([[ 0.,  1.],
+            [ 2.,  3.],
+            [ 4.,  5.]], dtype=float32)}
+
+
+
 Note that multi-dimensional indexing or slicing along a particular axis is currently not supported for a CSRNDArray.
 
 ## Sparse Operators and Storage Type Inference
@@ -262,6 +388,17 @@ out = mx.nd.sparse.dot(a, rhs)  # invoke sparse dot operator specialized for dot
 {'out':out}
 ```
 
+
+
+
+    {'out': 
+     [[ 15.]
+      [  0.]
+      [  9.]]
+     <NDArray 3x1 @cpu(0)>}
+
+
+
 For any sparse operator, the storage type of output array is inferred based on inputs. You can either read the documentation or inspect the `stype` attribute of the output array to know what storage type is inferred:
 
 
@@ -271,6 +408,13 @@ c = a + mx.nd.ones(shape=(3, 4))  # c will be a dense NDArray
 {'b.stype':b.stype, 'c.stype':c.stype}
 ```
 
+
+
+
+    {'b.stype': 'csr', 'c.stype': 'default'}
+
+
+
 For operators that don't specialize in sparse arrays, we can still use them with sparse inputs with some performance penalty. In MXNet, dense operators require all inputs and outputs to be in the dense format.
 
 If sparse inputs are provided, MXNet will convert sparse inputs into dense ones temporarily, so that the dense operator can be used.
@@ -285,6 +429,13 @@ e = mx.nd.log(a, out=e) # dense operator with a sparse output
 {'a.stype':a.stype, 'd.stype':d.stype, 'e.stype':e.stype} # stypes of a and e will be not changed
 ```
 
+
+
+
+    {'a.stype': 'csr', 'd.stype': 'default', 'e.stype': 'csr'}
+
+
+
 Note that warning messages will be printed when such a storage fallback event happens. If you are using jupyter notebook, the warning message will be printed in your terminal console.
 
 ## Data Loading
@@ -302,6 +453,16 @@ dataiter = mx.io.NDArrayIter(data, labels, batch_size, last_batch_handle='discar
 [batch.data[0] for batch in dataiter]
 ```
 
+
+
+
+    [
+     <CSRNDArray 3x4 @cpu(0)>, 
+     <CSRNDArray 3x4 @cpu(0)>, 
+     <CSRNDArray 3x4 @cpu(0)>]
+
+
+
 You can also load data stored in the [libsvm file format](https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/) using `mx.io.LibSVMIter`, where the format is: ``<label> <col_idx1>:<value1> <col_idx2>:<value2> ... <col_idxN>:<valueN>``. Each line in the file records the label and the column indices and data for non-zero entries. For example, for a matrix with 6 columns, ``1 2:1.5 4:-3.5`` means the label is ``1``, the data is ``[[0, 0, 1.5, 0, -3.5, 0]]``. More detailed examples of `m [...]
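
As a small sketch (the file name and shapes below are illustrative, not taken from this tutorial), a libsvm file can be written and then read back as CSRNDArray batches:

```python
import mxnet as mx

# Illustrative only: write a tiny libsvm file (zero-based, sorted column indices)
# and read it back with mx.io.LibSVMIter.
with open('sample.libsvm', 'w') as f:
    f.write("1 2:1.5 4:-3.5\n")
    f.write("0 0:2.0 5:1.0\n")
    f.write("1 1:0.5 3:4.0\n")

sample_iter = mx.io.LibSVMIter(data_libsvm='sample.libsvm', data_shape=(6,), batch_size=3)
for batch in sample_iter:
    print(sample_iter.getdata())   # a sparse CSRNDArray of shape (3, 6)
    print(sample_iter.getlabel())  # the dense labels [1, 0, 1]
```
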
 
 
@@ -328,6 +489,23 @@ for batch in data_train:
     print(data_train.getlabel())
 ```
 
+    
+    <CSRNDArray 3x10 @cpu(0)>
+    
+    [ 1.  1.  1.]
+    <NDArray 3 @cpu(0)>
+    
+    <CSRNDArray 3x10 @cpu(0)>
+    
+    [ 1. -1. -2.]
+    <NDArray 3 @cpu(0)>
+    
+    <CSRNDArray 3x10 @cpu(0)>
+    
+    [-3. -3.  4.]
+    <NDArray 3 @cpu(0)>
+
+
 Note that in the file the column indices are expected to be sorted in ascending order per row, and be zero-based instead of one-based.
 
 ## Advanced Topics
@@ -351,5 +529,8 @@ except mx.MXNetError as err:
     sys.stderr.write(str(err))
 ```
 
+
 <!-- INSERT SOURCE DOWNLOAD BUTTONS -->
 
+
+
diff --git a/versions/master/_sources/tutorials/sparse/row_sparse.txt b/versions/master/_sources/tutorials/sparse/row_sparse.txt
index 55f8a7d..70ca6b8 100644
--- a/versions/master/_sources/tutorials/sparse/row_sparse.txt
+++ b/versions/master/_sources/tutorials/sparse/row_sparse.txt
@@ -8,6 +8,7 @@ the weights of models with sparse datasets, the derived gradients of the weights
 
 Let's say we perform a matrix multiplication of ``X``  and ``W``, where ``X`` is a 1x2 matrix, and ``W`` is a 2x3 matrix. Let ``Y`` be the matrix multiplication of the two matrices:
 
+
 ```python
 import mxnet as mx
 X = mx.nd.array([[1,0]])
@@ -16,6 +17,20 @@ Y = mx.nd.dot(X, W)
 {'X': X, 'W': W, 'Y': Y}
 ```
 
+
+
+
+    {'W': 
+     [[ 3.  4.  5.]
+      [ 6.  7.  8.]]
+     <NDArray 2x3 @cpu(0)>, 'X': 
+     [[ 1.  0.]]
+     <NDArray 1x2 @cpu(0)>, 'Y': 
+     [[ 3.  4.  5.]]
+     <NDArray 1x3 @cpu(0)>}
+
+
+
 As you can see,
 
 ```
@@ -37,11 +52,22 @@ grad_W[1][2] = X[0][1] = 0
 
 As a matter of fact, you can calculate ``grad_W`` by multiplying the transpose of ``X`` with a matrix of ones:
 
+
 ```python
 grad_W = mx.nd.dot(X, mx.nd.ones_like(Y), transpose_a=True)
 grad_W
 ```
 
+
+
+
+    
+    [[ 1.  1.  1.]
+     [ 0.  0.  0.]]
+    <NDArray 2x3 @cpu(0)>
+
+
+
 As you can see, row 0 of ``grad_W`` contains non-zero values while row 1 of ``grad_W`` does not. Why did that happen?
 If you look at how ``grad_W`` is calculated, notice that since column 1 of ``X`` is filled with zeros, row 1 of ``grad_W`` is filled with zeros too.
 
@@ -96,7 +122,6 @@ The row sparse representation would be:
 - `indices` array stores the row index for each row slice with non-zero elements.
 
 
-
 ```python
 data = [[1, 2, 3], [4, 0, 5]]
 indices = [0, 2]
@@ -150,6 +175,15 @@ b = mx.nd.sparse.row_sparse_array((data_np, indices_np), shape=shape)
 {'a':a, 'b':b}
 ```
 
+
+
+
+    {'a': 
+     <RowSparseNDArray 6x2 @cpu(0)>, 'b': 
+     <RowSparseNDArray 6x2 @cpu(0)>}
+
+
+
 ## Function Overview
 
 Similar to `CSRNDArray`, there are several functions with `RowSparseNDArray` that behave the same way. In the code blocks below you can try out these common functions:
@@ -177,6 +211,13 @@ d = mx.nd.array(a, dtype=np.float16)
 (c.dtype, d.dtype)
 ```
 
+
+
+
+    (numpy.float32, numpy.float16)
+
+
+
 ## Inspecting Arrays
 
 As with `CSRNDArray`, you can inspect the contents of a `RowSparseNDArray` by filling
@@ -187,6 +228,18 @@ its contents into a dense `numpy.ndarray` using the `asnumpy` function.
 a.asnumpy()
 ```
 
+
+
+
+    array([[ 0.,  0.],
+           [ 1.,  2.],
+           [ 0.,  0.],
+           [ 0.,  0.],
+           [ 3.,  4.],
+           [ 0.,  0.]], dtype=float32)
+
+
+
 You can inspect the internal storage of a RowSparseNDArray by accessing attributes such as `indices` and `data`:
 
 
@@ -198,6 +251,18 @@ indices = a.indices
 {'a.stype': a.stype, 'data':data, 'indices':indices}
 ```
 
+
+
+
+    {'a.stype': 'row_sparse', 'data': 
+     [[ 1.  2.]
+      [ 3.  4.]]
+     <NDArray 2x2 @cpu(0)>, 'indices': 
+     [1 4]
+     <NDArray 2 @cpu(0)>}
+
+
+
 ## Storage Type Conversion
 
 You can convert an NDArray to a RowSparseNDArray and vice versa by using the `tostype` function:
@@ -213,6 +278,17 @@ dense = rsp.tostype('default')
 {'rsp':rsp, 'dense':dense}
 ```
 
+
+
+
+    {'dense': 
+     [[ 1.  1.]
+      [ 1.  1.]]
+     <NDArray 2x2 @cpu(0)>, 'rsp': 
+     <RowSparseNDArray 2x2 @cpu(0)>}
+
+
+
 You can also convert the storage type by using the `cast_storage` operator:
 
 
@@ -226,6 +302,17 @@ dense = mx.nd.sparse.cast_storage(rsp, 'default')
 {'rsp':rsp, 'dense':dense}
 ```
 
+
+
+
+    {'dense': 
+     [[ 1.  1.]
+      [ 1.  1.]]
+     <NDArray 2x2 @cpu(0)>, 'rsp': 
+     <RowSparseNDArray 2x2 @cpu(0)>}
+
+
+
 ## Copies
 
 You can use the `copy` method which makes a deep copy of the array and its data, and returns a new array.
@@ -242,6 +329,16 @@ a.copyto(d)
 {'b is a': b is a, 'b.asnumpy()':b.asnumpy(), 'c.asnumpy()':c.asnumpy(), 'd.asnumpy()':d.asnumpy()}
 ```
 
+
+
+
+    {'b is a': False, 'b.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32), 'c.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32), 'd.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32)}
+
+
+
 If the storage types of source array and destination array do not match,
 the storage type of destination array will not change when copying with `copyto` or the slice operator `[]`. The source array will be temporarily converted to desired storage type before the copy.
 
@@ -255,6 +352,13 @@ g.copyto(f)
 {'e.stype':e.stype, 'f.stype':f.stype, 'g.stype':g.stype}
 ```
 
+
+
+
+    {'e.stype': 'row_sparse', 'f.stype': 'row_sparse', 'g.stype': 'default'}
+
+
+
 ## Retain Row Slices
 
 You can retain a subset of row slices from a RowSparseNDArray specified by their row indices.
@@ -269,6 +373,22 @@ rsp_retained = mx.nd.sparse.retain(rsp, mx.nd.array([0, 1]))
 {'rsp.asnumpy()': rsp.asnumpy(), 'rsp_retained': rsp_retained, 'rsp_retained.asnumpy()': rsp_retained.asnumpy()}
 ```
 
+
+
+
+    {'rsp.asnumpy()': array([[ 1.,  2.],
+            [ 0.,  0.],
+            [ 3.,  4.],
+            [ 5.,  6.],
+            [ 0.,  0.]], dtype=float32), 'rsp_retained': 
+     <RowSparseNDArray 5x2 @cpu(0)>, 'rsp_retained.asnumpy()': array([[ 1.,  2.],
+            [ 0.,  0.],
+            [ 0.,  0.],
+            [ 0.,  0.],
+            [ 0.,  0.]], dtype=float32)}
+
+
+
 ## Sparse Operators and Storage Type Inference
 
 Operators that have specialized implementation for sparse arrays can be accessed in ``mx.nd.sparse``. You can read the [mxnet.ndarray.sparse API documentation](http://mxnet.io/versions/master/api/python/ndarray/sparse.html) to find what sparse operators are available.
@@ -288,6 +408,18 @@ transpose_dot = mx.nd.sparse.dot(lhs, rhs, transpose_a=True)
 {'transpose_dot': transpose_dot, 'transpose_dot.asnumpy()': transpose_dot.asnumpy()}
 ```
 
+
+
+
+    {'transpose_dot': 
+     <RowSparseNDArray 5x2 @cpu(0)>, 'transpose_dot.asnumpy()': array([[ 7.,  7.],
+            [ 9.,  9.],
+            [ 8.,  8.],
+            [ 0.,  0.],
+            [ 0.,  0.]], dtype=float32)}
+
+
+
 For any sparse operator, the storage type of output array is inferred based on inputs. You can either read the documentation or inspect the `stype` attribute of output array to know what storage type is inferred:
 
 
@@ -298,6 +430,13 @@ c = a + mx.nd.ones((5, 2))  # c will be a dense NDArray
 {'b.stype':b.stype, 'c.stype':c.stype}
 ```
 
+
+
+
+    {'b.stype': 'row_sparse', 'c.stype': 'default'}
+
+
+
 For operators that don't specialize in sparse arrays, you can still use them with sparse inputs with some performance penalty.
 In MXNet, dense operators require all inputs and outputs to be in the dense format.
 
@@ -315,6 +454,13 @@ e = mx.nd.log(a, out=e) # dense operator with a sparse output
 {'a.stype':a.stype, 'd.stype':d.stype, 'e.stype':e.stype} # stypes of a and e will be not changed
 ```
 
+
+
+
+    {'a.stype': 'row_sparse', 'd.stype': 'default', 'e.stype': 'row_sparse'}
+
+
+
 Note that warning messages will be printed when such a storage fallback event happens. If you are using jupyter notebook, the warning message will be printed in your terminal console.
 
 ## Sparse Optimizers
@@ -355,12 +501,42 @@ momentum = sgd.create_state(0, weight)
 ```
 
 
+
+
+    {'grad.asnumpy()': array([[ 0.,  0.],
+            [ 1.,  2.],
+            [ 4.,  5.],
+            [ 0.,  0.]], dtype=float32), 'momentum.asnumpy()': array([[ 0.,  0.],
+            [ 0.,  0.],
+            [ 0.,  0.],
+            [ 0.,  0.]], dtype=float32), 'weight.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.],
+            [ 1.,  1.],
+            [ 1.,  1.]], dtype=float32)}
+
+
+
+
 ```python
 sgd.update(0, weight, grad, momentum)
 # Only row 0 and row 2 are updated for both weight and momentum
 {"weight.asnumpy()":weight.asnumpy(), "momentum.asnumpy()":momentum.asnumpy()}
 ```
 
+
+
+
+    {'momentum.asnumpy()': array([[ 0.  ,  0.  ],
+            [-0.01, -0.02],
+            [-0.04, -0.05],
+            [ 0.  ,  0.  ]], dtype=float32),
+     'weight.asnumpy()': array([[ 1.        ,  1.        ],
+            [ 0.99000001,  0.98000002],
+            [ 0.95999998,  0.94999999],
+            [ 1.        ,  1.        ]], dtype=float32)}
+
+
+
 Note that both [mxnet.optimizer.SGD](https://mxnet.incubator.apache.org/api/python/optimization.html#mxnet.optimizer.SGD)
 and [mxnet.optimizer.Adam](https://mxnet.incubator.apache.org/api/python/optimization.html#mxnet.optimizer.Adam) support sparse updates in MXNet.
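
As a sketch (not part of the original tutorial), the same sparse-gradient update pattern works with Adam, reusing the `weight` and `grad` arrays from the SGD example above:

```python
# Illustrative only: drive an Adam update with the same sparse gradient.
# We update a copy so the `weight` array from the SGD example is left untouched.
adam = mx.optimizer.Adam(learning_rate=0.01)
w = weight.copy()
adam_state = adam.create_state(0, w)
adam.update(0, w, grad, adam_state)
w.asnumpy()
```
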
 
@@ -387,6 +563,5 @@ except mx.MXNetError as err:
 ```
 
 
-<!-- INSERT SOURCE DOWNLOAD BUTTONS -->
-
 
+<!-- INSERT SOURCE DOWNLOAD BUTTONS -->
diff --git a/versions/master/_sources/tutorials/sparse/train.txt b/versions/master/_sources/tutorials/sparse/train.txt
index 22ce039..6f4e808 100644
--- a/versions/master/_sources/tutorials/sparse/train.txt
+++ b/versions/master/_sources/tutorials/sparse/train.txt
@@ -45,6 +45,13 @@ c = mx.sym.Variable('c', stype='row_sparse')
 (a, b, c)
 ```
 
+
+
+
+    (<Symbol a>, <Symbol b>, <Symbol c>)
+
+
+
 ### Bind with Sparse Arrays
 
 The sparse symbols constructed above declare storage types of the arrays to hold.
@@ -67,6 +74,11 @@ c_exec.forward()
 print(b_exec.outputs, c_exec.outputs)
 ```
 
+    ([
+    <CSRNDArray 2x2 @cpu(0)>], [
+    <RowSparseNDArray 2x2 @cpu(0)>])
+
+
 You can update the array held by the variable by accessing executor's `arg_dict` and assigning new values.
 
 
@@ -78,6 +90,15 @@ eval_b = b_exec.outputs[0]
 {'eval_b': eval_b, 'eval_b.asnumpy()': eval_b.asnumpy()}
 ```
 
+
+
+
+    {'eval_b': 
+     <CSRNDArray 2x2 @cpu(0)>, 'eval_b.asnumpy()': array([[ 1.,  1.],
+            [ 1.,  1.]], dtype=float32)}
+
+
+
 ## Symbol Composition and Storage Type Inference
 
 ### Basic Symbol Composition
@@ -96,6 +117,15 @@ f = mx.sym.sparse.elemwise_add(c, c)
 {'d':d, 'e':e, 'f':f}
 ```
 
+
+
+
+    {'d': <Symbol elemwise_add0>,
+     'e': <Symbol negative0>,
+     'f': <Symbol elemwise_add1>}
+
+
+
 ### Storage Type Inference
 
 What will be the output storage types of sparse symbols? In MXNet, for any sparse symbol, the result storage types are inferred based on storage types of inputs.
@@ -113,6 +143,15 @@ rsp_add = add_exec.outputs[2]
 {'dense_add.stype': dense_add.stype, 'csr_add.stype':csr_add.stype, 'rsp_add.stype': rsp_add.stype}
 ```
 
+
+
+
+    {'csr_add.stype': 'csr',
+     'dense_add.stype': 'default',
+     'rsp_add.stype': 'row_sparse'}
+
+
+
 ### Storage Type Fallback
 
 For operators that don't specialize in certain sparse arrays, you can still use them with sparse inputs with some performance penalty. In MXNet, dense operators require all inputs and outputs to be in the dense format. If sparse inputs are provided, MXNet will convert sparse inputs into dense ones temporarily so that the dense operator can be used. If sparse outputs are provided, MXNet will convert the dense outputs generated by the dense operator into the provided sparse format. Warning [...]
@@ -130,6 +169,19 @@ fallback_log = fallback_exec.outputs[1]
 {'fallback_add': fallback_add, 'fallback_log': fallback_log}
 ```
 
+
+
+
+    {'fallback_add': 
+     [[ 0.  0.]
+      [ 0.  0.]]
+     <NDArray 2x2 @cpu(0)>, 'fallback_log': 
+     [[-inf -inf]
+      [-inf -inf]]
+     <NDArray 2x2 @cpu(0)>}
+
+
+
 ### Inspecting Storage Types of the Symbol Graph (Work in Progress)
 
 When the environment variable `MXNET_INFER_STORAGE_TYPE_VERBOSE_LOGGING` is set to `1`, MXNet will log the storage type information of
@@ -244,13 +296,25 @@ for epoch in range(10):
         mod.backward()                          # compute gradients
         mod.update()                            # update parameters
     print('Epoch %d, Metric = %s' % (epoch, metric.get()))
+assert metric.get()[1] < 1, "Achieved MSE (%f) is larger than expected (1.0)" % metric.get()[1]    
 ```
 
+    Epoch 0, Metric = ('mse', 886.16457029229127)
+    Epoch 1, Metric = ('mse', 173.16523056503445)
+    Epoch 2, Metric = ('mse', 71.625164168341811)
+    Epoch 3, Metric = ('mse', 29.625375983519298)
+    Epoch 4, Metric = ('mse', 12.45004676561909)
+    Epoch 5, Metric = ('mse', 6.9090727975622368)
+    Epoch 6, Metric = ('mse', 3.0759215722750142)
+    Epoch 7, Metric = ('mse', 1.3106610134811276)
+    Epoch 8, Metric = ('mse', 0.63063102482907718)
+    Epoch 9, Metric = ('mse', 0.35979430613957991)
+
+
+
 
 ### Training the model with multiple machines
 
 To train a sparse model with multiple machines, please refer to the example in [mxnet/example/sparse/](https://github.com/apache/incubator-mxnet/tree/master/example/sparse)
 
 <!-- INSERT SOURCE DOWNLOAD BUTTONS -->
-
-
diff --git a/versions/master/_sources/tutorials/unsupervised_learning/gan.txt b/versions/master/_sources/tutorials/unsupervised_learning/gan.txt
index 709e132..71774bc 100644
--- a/versions/master/_sources/tutorials/unsupervised_learning/gan.txt
+++ b/versions/master/_sources/tutorials/unsupervised_learning/gan.txt
@@ -1,43 +1,43 @@
-# Generative Adversarial Networks
+# Generative Adversarial Network (GAN)
 
-GANs are an application of unsupervised learning - you don't need labels for your dataset in order to train a GAN.
- 
-The GAN framework composes of two neural networks: a generator network and a discriminator network.
+Generative Adversarial Networks (GANs) are a class of algorithms used in unsupervised learning - you don't need labels for your dataset in order to train a GAN.
 
-The generator's job is to take a set of random numbers and produce data (such as images or text).
+The GAN framework is composed of two neural networks: a Generator network and a Discriminator network.
 
-The discriminator then takes in that data as well as samples of that data from a dataset and tries to determine if is "fake" (created by the generator network) or "real" (from the original dataset).
+The Generator's job is to take a set of random numbers and produce the data (such as images or text).
 
-During training, the two networks play a game against each other. The generator tries to create realistic data, so that it can fool the discriminator into thinking that the data it generated is from the original dataset. At the same time, the discriminator tries to not be fooled - it learns to become better at determining if data is real or fake.
+The Discriminator then takes in that data as well as samples of that data from a dataset and tries to determine if it is "fake" (created by the Generator network) or "real" (from the original dataset).
 
-Since the two networks are fighting in this game, they can be seen as as adversaries, which is where the term "Generative Adverserial Network" comes from.
+During training, the two networks play a game against each other. The Generator tries to create realistic data, so that it can fool the Discriminator into thinking that the data it generated is from the original dataset. At the same time, the Discriminator tries to not be fooled - it learns to become better at determining if data is real or fake.
+
+Since the two networks are fighting in this game, they can be seen as adversaries, which is where the term "Generative Adversarial Network" comes from.
 
 ## Deep Convolutional Generative Adversarial Networks
 
 This tutorial takes a look at Deep Convolutional Generative Adversarial Networks (DCGAN), which combines Convolutional Neural Networks (CNNs) and GANs.
 
-We will create a DCGAN that is able to create images of handwritten digits from random numbers.The tutorial uses the neural net architecture and guidelines outlined in [this paper](https://arxiv.org/abs/1511.06434), and the MNIST dataset.
+We will create a DCGAN that is able to create images of handwritten digits from random numbers. The tutorial uses the neural net architecture and guidelines outlined in [this paper](https://arxiv.org/abs/1511.06434), and the MNIST dataset.
 
-##How to Use This Tutorial
+## How to Use This Tutorial
 You can use this tutorial by executing each snippet of python code in order as it appears in the tutorial.
 
 
-1. The first net is the "generator" and creates images of handwritten digits from random numbers.
-2. The second net is the "discriminator" and determines if the image created by the generator is real (a realistic looking image of handwritten digits) or fake (an image that doesn't look like it came from the original dataset).
-    
+1. The first net is the "Generator" and creates images of handwritten digits from random numbers.
+2. The second net is the "Discriminator" and determines if the image created by the Generator is real (a realistic looking image of handwritten digits) or fake (an image that does not look like it is from the original dataset).
+
 Apart from creating a DCGAN, you'll also learn:
 
-- How to manipulate and iterate through batches images that you can feed into your neural network.
+- How to manipulate and iterate through batches of image data that you can feed into your neural network.
 
 - How to create a custom MXNet data iterator that generates random numbers from a normal distribution.
 
-- How to create a custom training process in MXNet, using lower level functions from the MXNet Module API such as .bind() .forward() and .backward(). The training process for a DCGAN is more complex than many other neural net's, so we need to use these functions instead of using the higher level .fit() function.
+- How to create a custom training process in MXNet, using lower level functions from the MXNet Module API such as .bind() .forward() and .backward(). The training process for a DCGAN is more complex than many other neural networks, so we need to use these functions instead of using the higher level .fit() function.
 
 - How to visualize images as they are going through the training process
 
 ## Prerequisites
 
-This tutorial assumes you're familiar with the concept of CNN's and have implemented one in MXNet. You should also be familiar with the concept of logistic regression. Having a basic understanding for MXNet data iterators helps, since we'll create a custom Data Iterator to iterate though random numbers as inputs to our generator network. 
+This tutorial assumes you are familiar with the concepts of CNNs and have implemented one in MXNet. You should also be familiar with the concept of logistic regression. Having a basic understanding of MXNet data iterators helps, since we will create a custom data iterator to iterate through random numbers as inputs to the Generator network.
 
 This example is designed to be trained on a single GPU. Training this network on CPU can be slow, so it's recommended that you use a GPU for training.
 
@@ -47,17 +47,17 @@ To complete this tutorial, you need:
 - Python 2.7, and the following libraries for Python:
     - Numpy - for matrix math
     - OpenCV - for image manipulation
-    - Scikit-learn - to easily get our dataset
-    - Matplotlib - to visualize our output
+    - Scikit-learn - to easily get the MNIST dataset
+    - Matplotlib - to visualize the output
 
 ## The Data
-We need two pieces of data to train our DCGAN:
+We need two pieces of data to train the DCGAN:
     1. Images of handwritten digits from the MNIST dataset
     2. Random numbers from a normal distribution
 
-Our generator network will use the random numbers as the input to produce images of handwritten digits, and out discriminator network will use images of handwritten digits from the MNIST dataset to determine if images produced by our generator are realistic.
+The Generator network will use the random numbers as the input to produce the images of handwritten digits, and the Discriminator network will use images of handwritten digits from the MNIST dataset to determine if images produced by the Generator are realistic.
 
-We are going to use the python library, scikit-learn, to get the MNIST dataset. Scikit-learn comes with a function that gets the dataset for us, which we will then manipulate to create our training and testing inputs.
+We are going to use the python library, scikit-learn, to get the MNIST dataset. Scikit-learn comes with a function that gets the dataset for us, which we will then manipulate to create the training and testing inputs.
 
 The MNIST dataset contains 70,000 images of handwritten digits. Each image is 28x28 pixels in size. To create random numbers, we're going to create a custom MXNet data iterator, which will return random numbers from a normal distribution as we need them.
 
@@ -65,13 +65,14 @@ The MNIST dataset contains 70,000 images of handwritten digits. Each image is 28
 
 ### 1. Preparing the MNIST dataset
 
-Let's start by preparing our handwritten digits from the MNIST dataset. We import the fetch_mldata function from scikit-learn, and use it to get the MNSIT dataset. Notice that it's shape is 70000x784. This contains the 70000 images on every row and 784 pixels of each image in the columns of each row. Each image is 28x28 pixels, but has been flattened so that all 784 images are represented in a single list.
+Let us start by preparing the handwritten digits from the MNIST dataset. We import the fetch_mldata function from scikit-learn, and use it to get the MNIST dataset. Notice that its shape is 70000x784. This contains 70000 images, one per row, with the 784 pixels of each image in the columns of that row. Each image is 28x28 pixels, but has been flattened so that all 784 pixels are represented in a single list.
+
 ```python
 from sklearn.datasets import fetch_mldata
 mnist = fetch_mldata('MNIST original')
 ```
 
-Next, we'll randomize the handwritten digits by using numpy to create random permutations on the dataset on our rows (images). We'll then reshape our dataset from 70000x786 to 70000x28x28, so that every image in our dataset is arranged into a 28x28 grid, where each cell in the grid represents 1 pixel of the image.
+Next, we will randomize the handwritten digits by using numpy to create random permutations on the dataset on the rows (images). We will then reshape the dataset from 70000x784 to 70000x28x28, so that every image in the dataset is arranged into a 28x28 grid, where each cell in the grid represents 1 pixel of the image.
 
 ```python
 import numpy as np
@@ -81,22 +82,23 @@ p = np.random.permutation(mnist.data.shape[0])
 X = mnist.data[p]
 X = X.reshape((70000, 28, 28))
 ```
-Since the DCGAN that we're creating takes in a 64x64 image as the input, we'll use OpenCV to resize the each 28x28 image to 64x64 images:
+Since the DCGAN that we're creating takes in a 64x64 image as the input, we will use OpenCV to resize each 28x28 image to 64x64:
 ```python
 import cv2
 X = np.asarray([cv2.resize(x, (64,64)) for x in X])
 ```
-Each pixel in our 64x64 image is represented by a number between 0-255, that represents the intensity of the pixel. However, we want to input numbers between -1 and 1 into our DCGAN, as suggested by the research paper. To rescale our pixels to be in the range of -1 to 1, we'll divide each pixel by (255/2). This put our images on a scale of 0-2. We can then subtract by 1, to get them in the range of -1 to 1.
+Each pixel in the 64x64 image is represented by a number between 0 and 255 that represents the intensity of the pixel. However, we want to input numbers between -1 and 1 into the DCGAN, as suggested by the [research paper](https://arxiv.org/abs/1511.06434). To rescale the pixel values, we will divide each pixel by (255/2). This changes the scale to 0-2. We then subtract 1 to get them in the range of -1 to 1.
+
 ```python
 X = X.astype(np.float32)/(255.0/2) - 1.0
 ```
-Ultimately, images are inputted into our neural net from a 70000x3x64x64 array, and they are currently in a 70000x64x64 array. We need to add 3 channels to our images. Typically when we are working with images, the 3 channels represent the red, green, and blue components of each image. Since the MNIST dataset is grayscale, we only need 1 channel to represent our dataset. We will pad the other channels with 0's:
+Ultimately, images are fed into the neural net through a 70000x3x64x64 array, but they are currently in a 70000x64x64 array. We need to add 3 channels to the images. Typically, when we are working with images, the 3 channels represent the red, green, and blue (RGB) components of each image. Since the MNIST dataset is grayscale, we only need 1 channel to represent the dataset, so we fill the other two channels by repeating (tiling) the single grayscale channel:
 
 ```python
 X = X.reshape((70000, 1, 64, 64))
 X = np.tile(X, (1, 3, 1, 1))
 ```
-Finally, we'll put our images into MXNet's NDArrayIter, which will allow MXNet to easily iterate through our images during training. We'll also split up them images into a batches, with 64 images in each batch. Every time we iterate, we'll get a 4 dimensional array with size (64, 3, 64, 64), representing a batch of 64 images.
+Finally, we will put the images into MXNet's NDArrayIter, which will allow MXNet to easily iterate through the images during training. We will also split them up into batches of 64 images each. Every time we iterate, we will get a 4 dimensional array with size (64, 3, 64, 64), representing a batch of 64 images.
 ```python
 import mxnet as mx
 batch_size = 64
@@ -104,7 +106,8 @@ image_iter = mx.io.NDArrayIter(X, batch_size=batch_size)
 ```
 ### 2. Preparing Random Numbers
 
-We need to input random numbers from a normal distribution to our generator network, so we'll create an MXNet DataIter that produces random numbers for each training batch. The DataIter is the base class of MXNet's Data Loading API. Below, we create a class called RandIter which is a subclass of DataIter. We use MXNet's built in mx.random.normal function in order to return the normally distributed random numbers every time we iterate.
+We need to input random numbers from a normal distribution to the Generator network, so we will create an MXNet DataIter that produces random numbers for each training batch. The DataIter is the base class of MXNet's Data Loading API. Below, we create a class called RandIter which is a subclass of DataIter. We use MXNet's built-in mx.random.normal function to return the random numbers from a normal distribution during the iteration.
+
 ```python
 class RandIter(mx.io.DataIter):
     def __init__(self, batch_size, ndim):
@@ -117,22 +120,22 @@ class RandIter(mx.io.DataIter):
         return True
 
     def getdata(self):
-        #Returns random numbers from a gaussian (normal) distribution 
+        #Returns random numbers from a gaussian (normal) distribution
         #with mean=0 and standard deviation = 1
         return [mx.random.normal(0, 1.0, shape=(self.batch_size, self.ndim, 1, 1))]
 ```
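+
+Because the hunk above only shows part of the class, here is a minimal self-contained sketch of a RandIter that follows the standard DataIter pattern. The `provide_data` name `'rand'` and the empty `provide_label` are assumptions for illustration; the tutorial's exact class body is elided by the diff:
+
+```python
+import mxnet as mx
+
+class RandIter(mx.io.DataIter):
+    def __init__(self, batch_size, ndim):
+        self.batch_size = batch_size
+        self.ndim = ndim
+        # Shape of the random input: (batch, Z, 1, 1); the name 'rand' is assumed here
+        self.provide_data = [('rand', (batch_size, ndim, 1, 1))]
+        self.provide_label = []
+
+    def iter_next(self):
+        # This iterator never runs out of data
+        return True
+
+    def getdata(self):
+        # Random numbers from a gaussian (normal) distribution with mean=0 and std=1
+        return [mx.random.normal(0, 1.0, shape=(self.batch_size, self.ndim, 1, 1))]
+```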
-When we initalize our RandIter, we need to provide two numbers: the batch size and how many random numbers we want to produce a single image from. This number is referred to as Z, and we'll set this to 100. This value comes from the research paper on the topic. Every time we iterate and get a batch of random numbers, we will get a 4 dimensional array with shape: (batch_size, Z, 1, 1), which in our example is (64, 100, 1, 1).
+When we initialize the RandIter, we need to provide two numbers: the batch size and how many random numbers we want to use to produce a single image. This number is referred to as Z, and we will set this to 100. This value comes from the research paper on the topic. Every time we iterate and get a batch of random numbers, we will get a 4 dimensional array with shape: (batch_size, Z, 1, 1), which in the example is (64, 100, 1, 1).
 ```python
 Z = 100
 rand_iter = RandIter(batch_size, Z)
 ```
 ## Create the Model
 
-Our model has two networks that we will train together - the generator network and the disciminator network.
+The model has two networks that we will train together - the Generator network and the Discriminator network.
 
 ### The Generator
 
-Let's start off by defining the generator network, which uses deconvolutional layers (also callled fractionally strided layers) to generate an image form random numbers :
+Let us start off by defining the Generator network, which uses Deconvolution layers (also called fractionally strided layers) to generate an image from random numbers:
 ```python
 no_bias = True
 fix_gamma = True
@@ -160,16 +163,16 @@ g5 = mx.sym.Deconvolution(gact4, name='g5', kernel=(4,4), stride=(2,2), pad=(1,1
 generatorSymbol = mx.sym.Activation(g5, name='gact5', act_type='tanh')
 ```
 
-Our generator image starts with random numbers that will be obtained from the RandIter we created earlier, so we created the rand variable for this input.
+The Generator starts with random numbers that will be obtained from the RandIter we created earlier, so we created the rand variable for this input.
 We then build the model, starting with a Deconvolution layer (sometimes called a 'fractionally strided layer'). We apply batch normalization and ReLU activation after the Deconvolution layer.
 
-We repeat this process 4 times, applying a (2,2) stride and (1,1) pad at each Deconvolutional layer, which doubles the size of our image at each layer. By creating these layers, our generator network will have to learn to upsample our input vector of random numbers, Z at each layer, so that network output a final image. We also reduce half the number of filters at each layer, reducing dimensionality at each layer. Ultimatley, our output layer is a 64x64x3 layer, representing the size and [...]
+We repeat this process 4 times, applying a (2,2) stride and (1,1) pad at each Deconvolution layer, which doubles the size of the image at each layer. By creating these layers, the Generator network will have to learn to upsample the input vector of random numbers, Z, at each layer, so that the network outputs a final image. We also reduce by half the number of filters at each layer, reducing dimensionality at each layer. Ultimately, the output layer is a 64x64x3 layer, representing the size an [...]
 
-Notice we used 3 parameters to help us create our model: no_bias, fixed_gamma, and epsilon. Neurons in our network won't have a bias added to them, this seems to work better in practice for the DCGAN. In our batch norm layer, we set fixed_gamma=True, which means gamma=1 for all of our batch norm layers. epsilon is a small number that gets added to our batch norm so that we don't end up dividing by zero. By default, CuDNN requires that this number is greater than 1e-5, so we add a small n [...]
+Notice we used 3 parameters to help us create the model: no_bias, fix_gamma, and epsilon. Neurons in the network won't have a bias added to them; this seems to work better in practice for the DCGAN. In the batch norm layer, we set fix_gamma=True, which means gamma=1 for all of the batch norm layers. epsilon is a small number that gets added to the batch norm so that we don't end up dividing by zero. By default, CuDNN requires that this number is greater than 1e-5, so we add a small n [...]
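+
+To make the layer pattern concrete, here is a minimal sketch of the first Generator block, since the full symbol definition is elided by the hunk above. The layer names, the num_filter value, and the exact eps constant below are illustrative assumptions rather than the tutorial's exact code; it relies on the no_bias and fix_gamma flags defined earlier:
+
+```python
+rand = mx.sym.Variable('rand')
+eps = 1e-5 + 1e-12  # assumed value, just above CuDNN's 1e-5 minimum
+
+# One Deconvolution -> BatchNorm -> ReLU block; the Generator stacks several of these
+g1 = mx.sym.Deconvolution(rand, name='g1', kernel=(4, 4), num_filter=1024, no_bias=no_bias)
+gbn1 = mx.sym.BatchNorm(g1, name='gbn1', fix_gamma=fix_gamma, eps=eps)
+gact1 = mx.sym.Activation(gbn1, name='gact1', act_type='relu')
+```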
 
 ### The Discriminator
 
-Let's now create our discriminator network, which will take in images of handwritten digits from the MNIST dataset and images created by the generator network:
+Let us now create the Discriminator network, which will take in images of handwritten digits from the MNIST dataset and images created by the Generator network:
 ```python
 data = mx.sym.Variable('data')
 
@@ -195,19 +198,22 @@ label = mx.sym.Variable('label')
 discriminatorSymbol = mx.sym.LogisticRegressionOutput(data=d5, label=label, name='dloss')
 ```
 
-We start off by creating the data variable, which is used to hold our input images to the discriminator.
+We start off by creating the data variable, which is used to hold the input images to the Discriminator.
+
+The Discriminator then goes through a series of 5 convolutional layers, each with a 4x4 kernel, 2x2 stride, and 1x1 pad. These layers halve the size of the image (which starts at 64x64) at each convolutional layer. The model also increases dimensionality at each layer by doubling the number of filters per convolutional layer, starting at 128 filters and ending at 1024 filters before we flatten the output.
 
-The discriminator then goes through a series of 5 convolutional layers, each with a 4x4 kernel, 2x2 stride, and 1x1 pad. These layers half the size of the image (which starts at 64x64) at each convolutional layer. Our model also increases dimensionality at each layer by doubling the number of filters per convolutional layer, starting at 128 filters and ending at 1024 filters before we flatten the output.
+At the final convolution, we flatten the neural net to get one number as the final output of the Discriminator network. This number is the probability that the image is real, as determined by the Discriminator. We use logistic regression to determine this probability. When we pass in "real" images from the MNIST dataset, we can label these as 1 and we can label the "fake" images from the Generator net as 0 to perform logistic regression on the Discriminator network.
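+
+Most of the Discriminator's convolution stack is elided by the hunk above, so here is a minimal sketch of one such block. The layer names, the LeakyReLU slope, and the num_filter value are assumptions that follow the DCGAN paper's guidelines, not the tutorial's exact code:
+
+```python
+# One Convolution -> LeakyReLU block; the Discriminator stacks several of these,
+# doubling num_filter each time, before flattening into the logistic regression output
+d1 = mx.sym.Convolution(data, name='d1', kernel=(4, 4), stride=(2, 2), pad=(1, 1),
+                        num_filter=128, no_bias=no_bias)
+dact1 = mx.sym.LeakyReLU(d1, name='dact1', act_type='leaky', slope=0.2)
+```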
 
-At the final convolution, we flatten the neural net to get one number as the final output of discriminator network. This number is the probability the image is real, as determined by our discriminator. We use logistic regression to determine this probability. When we pass in "real" images from the MNIST dataset, we can label these as 1 and we can label the "fake" images from the generator net as 0 to perform logistic regression on the discriminator network.
-Prepare the models using the Module API
+### Prepare the models using the Module API
 
-So far we have defined a MXNet Symbol for both the generator and the discriminator network. Before we can train our model, we need to bind these symbols using the Module API, which creates the computation graph for our models. It also allows us to decide how we want to initialize our model and what type of optimizer we want to use. Let's set up Module for both of our networks:
+So far we have defined an MXNet Symbol for both the Generator and the Discriminator network. Before we can train the model, we need to bind these symbols using the Module API, which creates the computation graph for the models. It also allows us to decide how we want to initialize the model and what type of optimizer we want to use. Let us set up the Module for both the networks:
 ```python
-#Hyperperameters
+#Hyper-parameters
 sigma = 0.02
 lr = 0.0002
 beta1 = 0.5
+# If you do not have a GPU, use the CPU context instead:
+# ctx = mx.cpu()
 ctx = mx.gpu(0)
 
 #=============Generator Module=============
@@ -236,27 +242,27 @@ discriminator.init_optimizer(
     })
 mods.append(discriminator)
 ```
-First, we create Modules for our networks and then bind the symbols that we've created in the previous steps to our modules.
-We use rand_iter.provide_data as the  data_shape to bind our generator network. This means that as we iterate though batches of data on the generator Module, our RandIter will provide us with random numbers to feed our Module using it's provide_data function.
+First, we create Modules for the networks and then bind the symbols that we've created in the previous steps to the modules.
+We use rand_iter.provide_data as the data_shape to bind the Generator network. This means that as we iterate through batches of the data on the Generator Module, the RandIter will provide us with random numbers to feed the Module using its provide_data function.
 
-Similarly, we bind the discriminator Module to image_iter.provide_data, which gives us images from MNIST from the NDArrayIter we had set up earlier, called image_iter.
+Similarly, we bind the Discriminator Module to image_iter.provide_data, which gives us images from MNIST from the NDArrayIter we had set up earlier, called image_iter.
 
-Notice that we're using the Normal initialization, with the hyperparameter sigma=0.02. This means our weight initializations for the neurons in our networks will random numbers from a Gaussian (normal) distribution with a mean of 0 and a standard deviation of 0.02.
+Notice that we are using Normal initialization, with the hyperparameter sigma=0.02. This means the weight initializations for the neurons in the networks will be random numbers from a Gaussian (normal) distribution with a mean of 0 and a standard deviation of 0.02.
 
-We also use the adam optimizer for gradient decent. We've set up two hyperparameters, lr and beta1 based on the values used in the DCGAN paper. We're using a single gpu, gpu(0) for training.
+We also use the Adam optimizer for gradient descent. We've set up two hyperparameters, lr and beta1, based on the values used in the DCGAN paper. We're using a single GPU, gpu(0), for training. Set the context to cpu() if you do not have a GPU on your machine.
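+
+The Generator's Module setup is elided by the hunk above; a minimal sketch of what that block looks like, assuming it mirrors the Discriminator setup shown (the exact argument values are assumptions), is:
+
+```python
+generator = mx.mod.Module(symbol=generatorSymbol, data_names=('rand',),
+                          label_names=None, context=ctx)
+# Bind to the RandIter's data shape so each batch of random numbers can be fed in
+generator.bind(data_shapes=rand_iter.provide_data)
+# Initialize weights from a normal distribution with standard deviation sigma
+generator.init_params(initializer=mx.init.Normal(sigma))
+# Adam optimizer with the learning rate and beta1 from the DCGAN paper
+generator.init_optimizer(
+    optimizer='adam',
+    optimizer_params={'learning_rate': lr, 'beta1': beta1})
+mods = [generator]
+```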
 
-### Visualizing Our Training
-Before we train the model, let's set up some helper functions that will help visualize what our generator is producing, compared to what the real image is:
+### Visualizing The Training
+Before we train the model, let us set up some helper functions that will help visualize what the Generator is producing, compared to what the real image is:
 ```python
 from matplotlib import pyplot as plt
 
-#Takes the images in our batch and arranges them in an array so that they can be
+#Takes the images in the batch and arranges them in an array so that they can be
 #Plotted using matplotlib
 def fill_buf(buf, num_images, img, shape):
     width = buf.shape[0]/shape[1]
     height = buf.shape[1]/shape[0]
-    img_width = (num_images%width)*shape[0]
-    img_hight = (num_images/height)*shape[1]
+    img_width = int(num_images%width)*shape[0]
+    img_hight = int(num_images/height)*shape[1]
     buf[img_hight:img_hight+shape[1], img_width:img_width+shape[0], :] = img
 
 #Plots two images side by side using matplotlib
@@ -268,8 +274,8 @@ def visualize(fake, real):
     #Repeat for real image
     real = real.transpose((0, 2, 3, 1))
     real = np.clip((real+1.0)*(255.0/2.0), 0, 255).astype(np.uint8)
-    
-    #Create buffer array that will hold all the images in our batch
+
+    #Create buffer array that will hold all the images in the batch
     #Fill the buffer so to arrange all images in the batch onto the buffer array
     n = np.ceil(np.sqrt(fake.shape[0]))
     fbuff = np.zeros((int(n*fake.shape[1]), int(n*fake.shape[2]), int(fake.shape[3])), dtype=np.uint8)
@@ -278,9 +284,9 @@ def visualize(fake, real):
     rbuff = np.zeros((int(n*real.shape[1]), int(n*real.shape[2]), int(real.shape[3])), dtype=np.uint8)
     for i, img in enumerate(real):
         fill_buf(rbuff, i, img, real.shape[1:3])
-        
+
     #Create a matplotlib figure with two subplots: one for the real and the other for the fake
-    #fill each plot with our buffer array, which creates the image
+    #fill each plot with the buffer array, which creates the image
     fig = plt.figure()
     ax1 = fig.add_subplot(2,2,1)
     ax1.imshow(fbuff)
@@ -288,22 +294,22 @@ def visualize(fake, real):
     ax2.imshow(rbuff)
     plt.show()
 ```
- 
+
 ## Fit the Model
 Training the DCGAN is a complex process that requires multiple steps.
-To fit the model, for every batch of data in our dataset:
+To fit the model, for every batch of data in the MNIST dataset:
 
-1. Use the Z vector, which contains our random numbers to do a forward pass through our generator. This outputs the "fake" image, since it's created from our generator.
+1. Use the Z vector, which contains the random numbers to do a forward pass through the Generator network. This outputs the "fake" image, since it is created from the Generator.
 
-2. Use the fake image as the input to do a forward and backwards pass through the discriminator network. We set our labels for our logistic regression to 0 to represent that this is a fake image. This trains the discriminator to learn what a fake image looks like. We save the gradient produced in backpropogation for the next step.
+2. Use the fake image as the input to do a forward and backward pass through the Discriminator network. We set the labels for logistic regression to 0 to represent that this is a fake image. This trains the Discriminator to learn what a fake image looks like. We save the gradient produced in backpropagation for the next step.
 
-3. Do a forwards and backwards pass through the discriminator using a real image from our dataset. Our label for logistic regression will now be 1 to represent real images, so our discriminator can learn to recognize a real image.
+3. Do a forward and backward pass through the Discriminator using a real image from the MNIST dataset. The label for logistic regression will now be 1 to represent the real images, so the Discriminator can learn to recognize a real image.
 
-4. Update the discriminator by adding the result of the gradient generated during backpropogation on the fake image with the gradient from backpropogation on the real image.
+4. Update the Discriminator by adding the result of the gradient generated during backpropagation on the fake image with the gradient from backpropagation on the real image.
 
-5. Now that the discriminator has been updated for the this batch, we still need to update the generator. First, do a forward and backwards pass with the same batch on the updated discriminator, to produce a new gradient. Use the new gradient to do a backwards pass
+5. Now that the Discriminator has been updated for this data batch, we still need to update the Generator. First, do a forward and backward pass with the same data batch on the updated Discriminator, to produce a new gradient. Use the new gradient to do a backward pass through the Generator and update its weights.
 
-Here's the main training loop for our DCGAN:
+Here is the main training loop for the DCGAN:
 
 ```python
 # =============train===============
@@ -317,29 +323,29 @@ for epoch in range(1):
         generator.forward(rbatch, is_train=True)
         #Output of training batch is the 64x64x3 image
         outG = generator.get_outputs()
-        
+
         #Pass the generated (fake) image through the discriminator, and save the gradient
         #Label (for logistic regression) is an array of 0's since this image is fake
         label = mx.nd.zeros((batch_size,), ctx=ctx)
         #Forward pass on the output of the discriminator network
         discriminator.forward(mx.io.DataBatch(outG, [label]), is_train=True)
-        #Do the backwards pass and save the gradient
+        #Do the backward pass and save the gradient
         discriminator.backward()
         gradD = [[grad.copyto(grad.context) for grad in grads] for grads in discriminator._exec_group.grad_arrays]
-        
+
         #Pass a batch of real images from MNIST through the discriminator
         #Set the label to be an array of 1's because these are the real images
         label[:] = 1
         batch.label = [label]
         #Forward pass on a batch of MNIST images
         discriminator.forward(batch, is_train=True)
-        #Do the backwards pass and add the saved gradient from the fake images to the gradient 
+        #Do the backward pass and add the saved gradient from the fake images to the gradient
         #generated by this backwards pass on the real images
         discriminator.backward()
         for gradsr, gradsf in zip(discriminator._exec_group.grad_arrays, gradD):
             for gradr, gradf in zip(gradsr, gradsf):
                 gradr += gradf
-        #Update gradient on the discriminator 
+        #Update gradient on the discriminator
         discriminator.update()
 
         #Now that we've updated the discriminator, let's update the generator
@@ -353,7 +359,7 @@ for epoch in range(1):
         generator.backward(diffD)
         #Update the gradients on the generator
         generator.update()
-        
+
         #Increment to the next batch, printing every 50 batches
         i += 1
         if i % 50 == 0:
@@ -364,20 +370,20 @@ for epoch in range(1):
             visualize(outG[0].asnumpy(), batch.data[0].asnumpy())
 ```
 
-This causes our GAN to train and we can visualize the progress that we're making as our networks train. After every 25 iterations, we're calling the visualize function that we created earlier, which creates the visual plots during training.
+This will train the GAN network and visualize the progress that we are making as the networks are trained. After every 50 batches, we call the visualize function that we created earlier, which plots the intermediate results.
 
-The plot on our left will represent what our generator created (the fake image) in the most recent iteration. The plot on the right will represent the original (real) image from the MNIST dataset that was inputted to the discriminator on the same iteration.
+The plot on the left will represent what the Generator created (the fake image) in the most recent iteration. The plot on the right will represent the original (real) image from the MNIST dataset that was input to the Discriminator on the same iteration.
 
-As training goes on the generator becomes better at generating realistic images. You can see this happening since images on the left become closer to the original dataset with each iteration.
+As the training goes on, the Generator becomes better at generating realistic images. You can see this happening as the images on the left become closer to the original dataset with each iteration.
 
 ## Summary
 
-We've now sucessfully used Apache MXNet to train a Deep Convolutional GAN using the MNIST dataset.
+We have now successfully used Apache MXNet to train a Deep Convolutional Generative Adversarial Network (DCGAN) using the MNIST dataset.
 
-As a result, we've created two neural nets: a generator, which is able to create images of handwritten digits from random numbers, and a discriminator, which is able to take an image and determine if it is an image of handwritten digits.
+As a result, we have created two neural nets: a Generator, which is able to create images of handwritten digits from random numbers, and a Discriminator, which is able to take an image and determine if it is an image of handwritten digits.
 
-Along the way, we've learned how to do the image manipulation and visualization that's associted with training deep neural nets. We've also learned how to some of MXNet's advanced training functionality to fit our model.
+Along the way, we have learned how to do the image manipulation and visualization that is associated with training deep neural nets. We have also learned how to use MXNet's Module API to perform advanced model training and fit the model.
 
 ## Acknowledgements
-This tutorial is based on [MXNet DCGAN codebase](https://github.com/apache/incubator-mxnet/blob/master/example/gan/dcgan.py), 
-[The original paper on GANs](https://arxiv.org/abs/1406.2661), as well as [this paper on deep convolutional GANs](https://arxiv.org/abs/1511.06434).
\ No newline at end of file
+This tutorial is based on [MXNet DCGAN codebase](https://github.com/apache/incubator-mxnet/blob/master/example/gan/dcgan.py),
+[The original paper on GANs](https://arxiv.org/abs/1406.2661), as well as [this paper on deep convolutional GANs](https://arxiv.org/abs/1511.06434).
diff --git a/versions/master/_static/mxnet-theme/index.html b/versions/master/_static/mxnet-theme/index.html
index 2a4daae..191d602 100644
--- a/versions/master/_static/mxnet-theme/index.html
+++ b/versions/master/_static/mxnet-theme/index.html
@@ -3,11 +3,7 @@
 <div class="row">
 <div class="col-lg-12">
 <div class="col-sm-6 col-xs-12" id="banner-title"><span>Apache MXNet</span>
-<p id="landing-title">is a flexible and efficient library for deep learning.</p>
-<p id="intro">
-                Building a high-performance deep learning library requires many system-level design decisions. In this design note, we share the rationale for the specific 
-                choices made when designing MXNet. We imagine that these insights may be useful to both deep learning practitioners and builders of other deep learning systems.
-            </p>
+<p id="landing-title">A flexible and efficient library for deep learning.</p>
 <div id="landing-btn-blk">
 <div id="install_blk">
 <a href="install/index.html" id="install_btn">Install</a>
@@ -25,17 +21,17 @@
 <div class="container">
 <div class="row">
 <div class="col-lg-4 col-sm-12">
+<h3>Apache MXNet 1.0 Released</h3>
+<p>We're excited to announce the release of MXNet 1.0! Check out the release notes for the latest updates.</p>
+<a href="https://github.com/apache/incubator-mxnet/releases/tag/1.0.0">Learn More</a>
+</div>
+<div class="col-lg-4 col-sm-12">
 <h3>Introducing - Gluon</h3>
 <p>We’re happy to introduce a new elegant, easy to use, imperative interface for MXNet.
         </p>
 <a href="http://gluon.mxnet.io">Learn More</a>
 </div>
 <div class="col-lg-4 col-sm-12">
-<h3>MXNet 0.12.0 Released</h3>
-<p>We're excited to announce the release of MXNet 0.12.0! Check out the release notes for latest updates.</p>
-<a href="https://github.com/dmlc/mxnet/releases">Learn More</a>
-</div>
-<div class="col-lg-4 col-sm-12">
 <h3>MXNet Joining Apache</h3>
 <p>We’re excited to announce that MXNet has been accepted to the Apache Incubator.
         </p>
@@ -49,10 +45,10 @@
 <div class="row">
 <div class="col-lg-4 col-sm-12" id="model-zoo-blk">
 <span class="glyphicon glyphicon-folder-open"></span>
-<h2>Model Zoo</h2>
-<p>Off the shelf pre-trained models. Fast implementations of many state-of-art models.</p>
+<h2>Gluon Model Zoo</h2>
+<p>One-click pre-trained models, included in Gluon. Fast implementations of many state-of-the-art models, for plug-and-play effortless use.</p>
 <div class="util-btn">
-<a href="model_zoo/index.html" id="model-zoo-link">Model zoo</a>
+<a href="api/python/gluon/model_zoo.html" id="model-zoo-link">Gluon model zoo</a>
 </div>
 </div>
 <div class="col-lg-4 col-sm-12" id="example-blk">
diff --git a/versions/master/_static/mxnet-theme/layout.html b/versions/master/_static/mxnet-theme/layout.html
index f00bd74..be57176 100644
--- a/versions/master/_static/mxnet-theme/layout.html
+++ b/versions/master/_static/mxnet-theme/layout.html
@@ -144,7 +144,7 @@
 
     <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block">
     {%- include "navbar.html" %}
     
diff --git a/versions/master/_static/mxnet.css b/versions/master/_static/mxnet.css
index 6448323..20db74b 100644
--- a/versions/master/_static/mxnet.css
+++ b/versions/master/_static/mxnet.css
@@ -132,6 +132,8 @@ img {
 .navbar .container {
     position: relative;
     left: 20px;
+    width: 100%;
+    align-content: center;
 }
 
 @media screen and (max-width: 510px) {
@@ -174,6 +176,8 @@ img {
    padding-top: 0;
     margin-top: 0;
     width: 125px;
+    padding-left: 25px;
+    padding-right: 200px;
 }
 
 #logo {
diff --git a/versions/master/api/c++/index.html b/versions/master/api/c++/index.html
index 7ed96a9..1db758c 100644
--- a/versions/master/api/c++/index.html
+++ b/versions/master/api/c++/index.html
@@ -52,7 +52,7 @@
 <link href="../julia/index.html" rel="prev" title="MXNet - Julia API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/julia/index.html b/versions/master/api/julia/index.html
index eb30ec6..3ff2ce3 100644
--- a/versions/master/api/julia/index.html
+++ b/versions/master/api/julia/index.html
@@ -52,7 +52,7 @@
 <link href="../r/index.html" rel="prev" title="MXNet - R API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/perl/index.html b/versions/master/api/perl/index.html
index 4cba8d1..74d21de 100644
--- a/versions/master/api/perl/index.html
+++ b/versions/master/api/perl/index.html
@@ -52,7 +52,7 @@
 <link href="../scala/kvstore.html" rel="prev" title="KVStore API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/perl/io.html b/versions/master/api/perl/io.html
index 1d0370b..f76eef8 100644
--- a/versions/master/api/perl/io.html
+++ b/versions/master/api/perl/io.html
@@ -53,7 +53,7 @@
 <link href="symbol.html" rel="prev" title="MXNet Perl Symbolic API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/perl/kvstore.html b/versions/master/api/perl/kvstore.html
index 845801f..95faec5 100644
--- a/versions/master/api/perl/kvstore.html
+++ b/versions/master/api/perl/kvstore.html
@@ -53,7 +53,7 @@
 <link href="ndarray.html" rel="prev" title="NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/perl/module.html b/versions/master/api/perl/module.html
index 2c17748..f05f886 100644
--- a/versions/master/api/perl/module.html
+++ b/versions/master/api/perl/module.html
@@ -53,7 +53,7 @@
 <link href="index.html" rel="prev" title="MXNet - Perl API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/perl/ndarray.html b/versions/master/api/perl/ndarray.html
index 52a0ebd..0adb44f 100644
--- a/versions/master/api/perl/ndarray.html
+++ b/versions/master/api/perl/ndarray.html
@@ -53,7 +53,7 @@
 <link href="io.html" rel="prev" title="Data Loading API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/perl/symbol.html b/versions/master/api/perl/symbol.html
index 0c8ffee..3b0ea16 100644
--- a/versions/master/api/perl/symbol.html
+++ b/versions/master/api/perl/symbol.html
@@ -53,7 +53,7 @@
 <link href="module.html" rel="prev" title="Module API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/autograd/autograd.html b/versions/master/api/python/autograd/autograd.html
index 9a4c4b9..bdfbc5b 100644
--- a/versions/master/api/python/autograd/autograd.html
+++ b/versions/master/api/python/autograd/autograd.html
@@ -53,7 +53,7 @@
 <link href="../executor/executor.html" rel="prev" title="Executor and Executor Manager"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -577,7 +577,7 @@ backward function will be used instead of the default chain-rule.
 You can also cast to numpy array and back for some operations in
 forward and backward.</p>
 <p>For example, a stable sigmoid function can be defined as:</p>
-<div class="highlight-python"><div class="highlight"><pre><span></span><span class="k">class</span> <span class="nc">sigmoid</span><span class="p">(</span><span class="n">Function</span><span class="p">):</span>
+<div class="highlight-python"><div class="highlight"><pre><span></span><span class="k">class</span> <span class="nc">sigmoid</span><span class="p">(</span><span class="n">mx</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">Function</span><span class="p">):</span>
     <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
         <span class="n">y</span> <span class="o">=</span> <span class="mi">1</span> <span class="o">/</span> <span class="p">(</span><span class="mi">1</span> <span class="o">+</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">))</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">save_for_backward</span><span class="p">(</span><span class="n">y</span><span class="p">)</span>
@@ -590,6 +590,17 @@ forward and backward.</p>
         <span class="k">return</span> <span class="n">y</span> <span class="o">*</span> <span class="p">(</span><span class="mi">1</span><span class="o">-</span><span class="n">y</span><span class="p">)</span>
 </pre></div>
 </div>
+<p>Then, the function can be used in the following way:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">func</span> <span class="o">=</span> <span class="n">sigmoid</span><span class="p">()</span>
+<span class="n">x</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">uniform</span><span class="p">(</span><span class="n">shape</span><span class="o">=</span><span class="p">(</span><span class="mi">10</span><span class="p">,))</span>
+<span class="n">x</span><span class="o">.</span><span class="n">attach_grad</span><span class="p">()</span>
+
+<span class="k">with</span> <span class="n">mx</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">record</span><span class="p">():</span>
+    <span class="n">m</span> <span class="o">=</span> <span class="n">func</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
+    <span class="n">m</span><span class="o">.</span><span class="n">backward</span><span class="p">()</span>
+<span class="n">dx</span> <span class="o">=</span> <span class="n">x</span><span class="o">.</span><span class="n">grad</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
+</pre></div>
+</div>
 <dl class="method">
 <dt id="mxnet.autograd.Function.forward">
 <code class="descname">forward</code><span class="sig-paren">(</span><em>*inputs</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/mxnet/autograd.html#Function.forward"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.autograd.Function.forward" title="Permalink to this definition">¶</a></dt>
diff --git a/versions/master/api/python/callback/callback.html b/versions/master/api/python/callback/callback.html
index 7e04d40..74a0519 100644
--- a/versions/master/api/python/callback/callback.html
+++ b/versions/master/api/python/callback/callback.html
@@ -53,7 +53,7 @@
 <link href="../optimization/optimization.html" rel="prev" title="Optimization: initialize and update weights"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/executor/executor.html b/versions/master/api/python/executor/executor.html
index 090f84f..20b0eca 100644
--- a/versions/master/api/python/executor/executor.html
+++ b/versions/master/api/python/executor/executor.html
@@ -53,7 +53,7 @@
 <link href="../module/module.html" rel="prev" title="Module API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/gluon/contrib.html b/versions/master/api/python/gluon/contrib.html
index 875e5a6..552c7e6 100644
--- a/versions/master/api/python/gluon/contrib.html
+++ b/versions/master/api/python/gluon/contrib.html
@@ -53,7 +53,7 @@
 <link href="model_zoo.html" rel="prev" title="Gluon Model Zoo"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/gluon/data.html b/versions/master/api/python/gluon/data.html
index 8b2f889..1c808de 100644
--- a/versions/master/api/python/gluon/data.html
+++ b/versions/master/api/python/gluon/data.html
@@ -53,7 +53,7 @@
 <link href="loss.html" rel="prev" title="Gluon Loss API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/gluon/gluon.html b/versions/master/api/python/gluon/gluon.html
index 2243283..9ed1e86 100644
--- a/versions/master/api/python/gluon/gluon.html
+++ b/versions/master/api/python/gluon/gluon.html
@@ -53,7 +53,7 @@
 <link href="../autograd/autograd.html" rel="prev" title="Autograd Package"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/gluon/loss.html b/versions/master/api/python/gluon/loss.html
index 2450aa8..bf4c216 100644
--- a/versions/master/api/python/gluon/loss.html
+++ b/versions/master/api/python/gluon/loss.html
@@ -53,7 +53,7 @@
 <link href="rnn.html" rel="prev" title="Gluon Recurrent Neural Network API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/gluon/model_zoo.html b/versions/master/api/python/gluon/model_zoo.html
index 089a064..4a40e1b 100644
--- a/versions/master/api/python/gluon/model_zoo.html
+++ b/versions/master/api/python/gluon/model_zoo.html
@@ -53,7 +53,7 @@
 <link href="data.html" rel="prev" title="Gluon Data API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -282,8 +282,8 @@ and pre-trained models to help bootstrap machine learning applications.</p>
 <span class="n">densenet</span> <span class="o">=</span> <span class="n">vision</span><span class="o">.</span><span class="n">densenet_161</span><span class="p">()</span>
 </pre></div>
 </div>
-<p>We provide pre-trained models for all the models except ResNet V2.
-These can constructed by passing <code class="docutils literal"><span class="pre">pretrained=True</span></code>:</p>
+<p>We provide pre-trained models for all the listed models.
+These models can be constructed by passing <code class="docutils literal"><span class="pre">pretrained=True</span></code>:</p>
 <div class="code highlight-python"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">mxnet.gluon.model_zoo</span> <span class="kn">import</span> <span class="n">vision</span>
 <span class="n">resnet18</span> <span class="o">=</span> <span class="n">vision</span><span class="o">.</span><span class="n">resnet18_v1</span><span class="p">(</span><span class="n">pretrained</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
 <span class="n">alexnet</span> <span class="o">=</span> <span class="n">vision</span><span class="o">.</span><span class="n">alexnet</span><span class="p">(</span><span class="n">pretrained</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
@@ -303,6 +303,238 @@ The transformation should preferrably happen at preprocessing. You can use
                                       <span class="n">std</span><span class="o">=</span><span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mf">0.229</span><span class="p">,</span> <span class="mf">0.224</span><span class="p">,</span> <span class="mf">0.225</span><span class="p">]))</span>
 </pre></div>
 </div>
+<p>The following table summarizes the available models.</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="17%"/>
+<col width="17%"/>
+<col width="17%"/>
+<col width="17%"/>
+<col width="17%"/>
+<col width="17%"/>
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">Alias</th>
+<th class="head">Network</th>
+<th class="head"># Parameters</th>
+<th class="head">Top-1 Accuracy</th>
+<th class="head">Top-5 Accuracy</th>
+<th class="head">Origin</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>alexnet</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1404.5997">AlexNet</a></td>
+<td>61,100,840</td>
+<td>0.5492</td>
+<td>0.7803</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>densenet121</td>
+<td><a class="reference external" href="https://arxiv.org/pdf/1608.06993.pdf">DenseNet-121</a></td>
+<td>8,062,504</td>
+<td>0.7497</td>
+<td>0.9225</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>densenet161</td>
+<td><a class="reference external" href="https://arxiv.org/pdf/1608.06993.pdf">DenseNet-161</a></td>
+<td>28,900,936</td>
+<td>0.7770</td>
+<td>0.9380</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>densenet169</td>
+<td><a class="reference external" href="https://arxiv.org/pdf/1608.06993.pdf">DenseNet-169</a></td>
+<td>14,307,880</td>
+<td>0.7617</td>
+<td>0.9317</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>densenet201</td>
+<td><a class="reference external" href="https://arxiv.org/pdf/1608.06993.pdf">DenseNet-201</a></td>
+<td>20,242,984</td>
+<td>0.7732</td>
+<td>0.9362</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>inceptionv3</td>
+<td><a class="reference external" href="http://arxiv.org/abs/1512.00567">Inception V3 299x299</a></td>
+<td>23,869,000</td>
+<td>0.7755</td>
+<td>0.9364</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>mobilenet0.25</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1704.04861">MobileNet 0.25</a></td>
+<td>475,544</td>
+<td>0.5185</td>
+<td>0.7608</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-odd"><td>mobilenet0.5</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1704.04861">MobileNet 0.5</a></td>
+<td>1,342,536</td>
+<td>0.6307</td>
+<td>0.8475</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-even"><td>mobilenet0.75</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1704.04861">MobileNet 0.75</a></td>
+<td>2,601,976</td>
+<td>0.6738</td>
+<td>0.8782</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-odd"><td>mobilenet1.0</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1704.04861">MobileNet 1.0</a></td>
+<td>4,253,864</td>
+<td>0.7105</td>
+<td>0.9006</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-even"><td>resnet18_v1</td>
+<td><a class="reference external" href="http://arxiv.org/abs/1512.03385">ResNet-18 V1</a></td>
+<td>11,699,112</td>
+<td>0.6803</td>
+<td>0.8818</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>resnet34_v1</td>
+<td><a class="reference external" href="http://arxiv.org/abs/1512.03385">ResNet-34 V1</a></td>
+<td>21,814,696</td>
+<td>0.7202</td>
+<td>0.9066</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>resnet50_v1</td>
+<td><a class="reference external" href="http://arxiv.org/abs/1512.03385">ResNet-50 V1</a></td>
+<td>25,629,032</td>
+<td>0.7540</td>
+<td>0.9266</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-odd"><td>resnet101_v1</td>
+<td><a class="reference external" href="http://arxiv.org/abs/1512.03385">ResNet-101 V1</a></td>
+<td>44,695,144</td>
+<td>0.7693</td>
+<td>0.9334</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-even"><td>resnet152_v1</td>
+<td><a class="reference external" href="http://arxiv.org/abs/1512.03385">ResNet-152 V1</a></td>
+<td>60,404,072</td>
+<td>0.7727</td>
+<td>0.9353</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-odd"><td>resnet18_v2</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1603.05027">ResNet-18 V2</a></td>
+<td>11,695,796</td>
+<td>0.6961</td>
+<td>0.8901</td>
+<td>Trained with <a class="reference external" href="https://github.com/apache/incubator-mxnet/blob/4dcd96ae2f6820e01455079d00f49db1cd21eda9/example/gluon/image_classification.py">script</a></td>
+</tr>
+<tr class="row-even"><td>resnet34_v2</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1603.05027">ResNet-34 V2</a></td>
+<td>21,811,380</td>
+<td>0.7324</td>
+<td>0.9125</td>
+<td>Trained with <a class="reference external" href="https://github.com/apache/incubator-mxnet/blob/4dcd96ae2f6820e01455079d00f49db1cd21eda9/example/gluon/image_classification.py">script</a></td>
+</tr>
+<tr class="row-odd"><td>resnet50_v2</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1603.05027">ResNet-50 V2</a></td>
+<td>25,595,060</td>
+<td>0.7622</td>
+<td>0.9297</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-even"><td>resnet101_v2</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1603.05027">ResNet-101 V2</a></td>
+<td>44,639,412</td>
+<td>0.7747</td>
+<td>0.9375</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-odd"><td>resnet152_v2</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1603.05027">ResNet-152 V2</a></td>
+<td>60,329,140</td>
+<td>0.7833</td>
+<td>0.9409</td>
+<td>Trained with <a class="reference external" href="https://github.com/zhreshold/mxnet/blob/2fbfdbcbacff8b738bd9f44e9c8cefc84d6dfbb5/example/gluon/train_imagenet.py">script</a></td>
+</tr>
+<tr class="row-even"><td>squeezenet1.0</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1602.07360">SqueezeNet 1.0</a></td>
+<td>1,248,424</td>
+<td>0.5611</td>
+<td>0.7909</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>squeezenet1.1</td>
+<td><a class="reference external" href="https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1">SqueezeNet 1.1</a></td>
+<td>1,235,496</td>
+<td>0.5496</td>
+<td>0.7817</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>vgg11</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-11</a></td>
+<td>132,863,336</td>
+<td>0.6662</td>
+<td>0.8734</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>vgg13</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-13</a></td>
+<td>133,047,848</td>
+<td>0.6774</td>
+<td>0.8811</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>vgg16</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-16</a></td>
+<td>138,357,544</td>
+<td>0.6986</td>
+<td>0.8945</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>vgg19</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-19</a></td>
+<td>143,667,240</td>
+<td>0.7072</td>
+<td>0.8988</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>vgg11_bn</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-11 with batch normalization</a></td>
+<td>132,874,344</td>
+<td>0.6859</td>
+<td>0.8872</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>vgg13_bn</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-13 with batch normalization</a></td>
+<td>133,059,624</td>
+<td>0.6884</td>
+<td>0.8882</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-even"><td>vgg16_bn</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-16 with batch normalization</a></td>
+<td>138,374,440</td>
+<td>0.7142</td>
+<td>0.9043</td>
+<td>Converted from pytorch vision</td>
+</tr>
+<tr class="row-odd"><td>vgg19_bn</td>
+<td><a class="reference external" href="https://arxiv.org/abs/1409.1556">VGG-19 with batch normalization</a></td>
+<td>143,689,256</td>
+<td>0.7241</td>
+<td>0.9093</td>
+<td>Converted from pytorch vision</td>
+</tr>
+</tbody>
+</table>
 <table border="1" class="longtable docutils">
 <colgroup>
 <col width="10%"/>
@@ -602,8 +834,8 @@ The transformation should preferably happen at preprocessing. You can use
 <span class="n">densenet</span> <span class="o">=</span> <span class="n">vision</span><span class="o">.</span><span class="n">densenet_161</span><span class="p">()</span>
 </pre></div>
 </div>
-<p>We provide pre-trained models for all the models except ResNet V2.
-These can constructed by passing <code class="docutils literal"><span class="pre">pretrained=True</span></code>:</p>
+<p>We provide pre-trained models for all the listed models.
+These models can be constructed by passing <code class="docutils literal"><span class="pre">pretrained=True</span></code>:</p>
 <div class="code highlight-python"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">mxnet.gluon.model_zoo</span> <span class="kn">import</span> <span class="n">vision</span>
 <span class="n">resnet18</span> <span class="o">=</span> <span class="n">vision</span><span class="o">.</span><span class="n">resnet18_v1</span><span class="p">(</span><span class="n">pretrained</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
 <span class="n">alexnet</span> <span class="o">=</span> <span class="n">vision</span><span class="o">.</span><span class="n">alexnet</span><span class="p">(</span><span class="n">pretrained</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
diff --git a/versions/master/api/python/gluon/nn.html b/versions/master/api/python/gluon/nn.html
index 517efac..6be5792 100644
--- a/versions/master/api/python/gluon/nn.html
+++ b/versions/master/api/python/gluon/nn.html
@@ -53,7 +53,7 @@
 <link href="gluon.html" rel="prev" title="Gluon Package"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/gluon/rnn.html b/versions/master/api/python/gluon/rnn.html
index 69d5c73..9c05dbc 100644
--- a/versions/master/api/python/gluon/rnn.html
+++ b/versions/master/api/python/gluon/rnn.html
@@ -53,7 +53,7 @@
 <link href="nn.html" rel="prev" title="Gluon Neural Network Layers"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/image/image.html b/versions/master/api/python/image/image.html
index 59b33b4..a16cdf5 100644
--- a/versions/master/api/python/image/image.html
+++ b/versions/master/api/python/image/image.html
@@ -53,7 +53,7 @@
 <link href="../io/io.html" rel="prev" title="Data Loading API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/index.html b/versions/master/api/python/index.html
index 256492a..3891fb5 100644
--- a/versions/master/api/python/index.html
+++ b/versions/master/api/python/index.html
@@ -52,7 +52,7 @@
 <link href="../../index.html" rel="prev" title="Contents"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/io/io.html b/versions/master/api/python/io/io.html
index 77773ce..9127acf 100644
--- a/versions/master/api/python/io/io.html
+++ b/versions/master/api/python/io/io.html
@@ -53,7 +53,7 @@
 <link href="../kvstore/kvstore.html" rel="prev" title="KVStore API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -288,7 +288,7 @@ shapes and name.</p>
 </colgroup>
 <tbody valign="top">
 <tr class="row-odd"><td><a class="reference internal" href="#mxnet.io.NDArrayIter" title="mxnet.io.NDArrayIter"><code class="xref py py-obj docutils literal"><span class="pre">io.NDArrayIter</span></code></a></td>
-<td>Returns an iterator for <code class="docutils literal"><span class="pre">mx.nd.NDArray</span></code>, <code class="docutils literal"><span class="pre">numpy.ndarray</span></code>, <code class="docutils literal"><span class="pre">h5py.Dataset</span></code> or <code class="docutils literal"><span class="pre">mx.nd.sparse.CSRNDArray</span></code>.</td>
+<td>Returns an iterator for <code class="docutils literal"><span class="pre">mx.nd.NDArray</span></code>, <code class="docutils literal"><span class="pre">numpy.ndarray</span></code>, <code class="docutils literal"><span class="pre">h5py.Dataset</span></code>, <code class="docutils literal"><span class="pre">mx.nd.sparse.CSRNDArray</span></code> or <code class="docutils literal"><span class="pre">scipy.sparse.csr_matrix</span></code>.</td>
 </tr>
 <tr class="row-even"><td><a class="reference internal" href="#mxnet.io.CSVIter" title="mxnet.io.CSVIter"><code class="xref py py-obj docutils literal"><span class="pre">io.CSVIter</span></code></a></td>
 <td>Returns the CSV file iterator.</td>
@@ -717,7 +717,7 @@ in iter[i].provide_data.</li>
 <dt id="mxnet.io.NDArrayIter">
 <em class="property">class </em><code class="descclassname">mxnet.io.</code><code class="descname">NDArrayIter</code><span class="sig-paren">(</span><em>data</em>, <em>label=None</em>, <em>batch_size=1</em>, <em>shuffle=False</em>, <em>last_batch_handle='pad'</em>, <em>data_name='data'</em>, <em>label_name='softmax_label'</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/mxnet/io.html#NDArrayIter"><span class="viewcode-link">[source]</span></a><a c [...]
 <dd><p>Returns an iterator for <code class="docutils literal"><span class="pre">mx.nd.NDArray</span></code>, <code class="docutils literal"><span class="pre">numpy.ndarray</span></code>, <code class="docutils literal"><span class="pre">h5py.Dataset</span></code>
-or <code class="docutils literal"><span class="pre">mx.nd.sparse.CSRNDArray</span></code>.</p>
+<code class="docutils literal"><span class="pre">mx.nd.sparse.CSRNDArray</span></code> or <code class="docutils literal"><span class="pre">scipy.sparse.csr_matrix</span></code>.</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="gp">>>> </span><span class="n">data</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">arange</span><span class="p">(</span><span class="mi">40</span><span class="p">)</span><span class="o">.</span><span class="n">reshape</span><span class="p">((</span><span class="mi">10</span><span class="p">,</span><span class="mi">2</span><span class="p">,</span><span clas [...]
 <span class="gp">>>> </span><span class="n">labels</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">([</span><span class="mi">10</span><span class="p">,</span> <span class="mi">1</span><span class="p">])</span>
 <span class="gp">>>> </span><span class="n">dataiter</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">io</span><span class="o">.</span><span class="n">NDArrayIter</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="bp">True</span><span class="p">,</span> <span class="n">last_batch_handle</span><s [...]
diff --git a/versions/master/api/python/kvstore/kvstore.html b/versions/master/api/python/kvstore/kvstore.html
index 3886c24..fff52cb 100644
--- a/versions/master/api/python/kvstore/kvstore.html
+++ b/versions/master/api/python/kvstore/kvstore.html
@@ -53,7 +53,7 @@
 <link href="../gluon/contrib.html" rel="prev" title="Gluon Contrib API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/metric/metric.html b/versions/master/api/python/metric/metric.html
index 77ba532..001c8a2 100644
--- a/versions/master/api/python/metric/metric.html
+++ b/versions/master/api/python/metric/metric.html
@@ -53,7 +53,7 @@
 <link href="../callback/callback.html" rel="prev" title="Callback API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/model.html b/versions/master/api/python/model.html
index c7d57df..fcbc688 100644
--- a/versions/master/api/python/model.html
+++ b/versions/master/api/python/model.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -1951,6 +1951,29 @@ by <a class="reference internal" href="optimization/optimization.html#mxnet.opti
 </table>
 </dd></dl>
 <dl class="class">
+<dt id="mxnet.optimizer.FTML">
+<em class="property">class </em><code class="descclassname">mxnet.optimizer.</code><code class="descname">FTML</code><span class="sig-paren">(</span><em>beta1=0.6</em>, <em>beta2=0.999</em>, <em>epsilon=1e-08</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="../../_modules/mxnet/optimizer.html#FTML"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.optimizer.FTML" title="Permalink to this definition">¶</a></dt>
+<dd><p>The FTML optimizer.</p>
+<p>This class implements the optimizer described in
+<em>FTML - Follow the Moving Leader in Deep Learning</em>,
+available at <a class="reference external" href="http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf">http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf</a>.</p>
+<p>This optimizer accepts the following parameters in addition to those accepted
+by <a class="reference internal" href="optimization/optimization.html#mxnet.optimizer.Optimizer" title="mxnet.optimizer.Optimizer"><code class="xref py py-class docutils literal"><span class="pre">Optimizer</span></code></a>.</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
+<li><strong>beta1</strong> (<em>float, optional</em>) – 0 < beta1 < 1. Generally close to 0.5.</li>
+<li><strong>beta2</strong> (<em>float, optional</em>) – 0 < beta2 < 1. Generally close to 1.</li>
+<li><strong>epsilon</strong> (<em>float, optional</em>) – Small value to avoid division by 0.</li>
+</ul>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
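+<p>A minimal usage sketch, based only on the constructor signature documented above; the small
+<code class="docutils literal"><span class="pre">Dense</span></code> block and passing the optimizer
+instance to <code class="docutils literal"><span class="pre">gluon.Trainer</span></code> are illustrative
+assumptions, not part of this reference:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+from mxnet import gluon
+
+net = gluon.nn.Dense(10)   # any small block, used only for demonstration
+net.initialize()
+
+# Build the optimizer with the documented hyper-parameters and hand it to a Trainer.
+opt = mx.optimizer.FTML(beta1=0.6, beta2=0.999, epsilon=1e-8)
+trainer = gluon.Trainer(net.collect_params(), opt)
+</pre></div>
+</div>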
+<dl class="class">
 <dt id="mxnet.optimizer.DCASGD">
 <em class="property">class </em><code class="descclassname">mxnet.optimizer.</code><code class="descname">DCASGD</code><span class="sig-paren">(</span><em>momentum=0.0</em>, <em>lamda=0.04</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="../../_modules/mxnet/optimizer.html#DCASGD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.optimizer.DCASGD" title="Permalink to this definition">¶</a></dt>
 <dd><p>The DCASGD optimizer.</p>
diff --git a/versions/master/api/python/module/module.html b/versions/master/api/python/module/module.html
index 6b3171f..a2595cf 100644
--- a/versions/master/api/python/module/module.html
+++ b/versions/master/api/python/module/module.html
@@ -53,7 +53,7 @@
 <link href="../symbol/rnn.html" rel="prev" title="RNN Cell Symbol API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/ndarray/contrib.html b/versions/master/api/python/ndarray/contrib.html
index 9f7ca49..83a9513 100644
--- a/versions/master/api/python/ndarray/contrib.html
+++ b/versions/master/api/python/ndarray/contrib.html
@@ -53,7 +53,7 @@
 <link href="sparse.html" rel="prev" title="Sparse NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -724,7 +724,7 @@ SparseEmbedding(x, y, 4, 5) = [[[  5.,   6.,   7.,   8.,   9.],
                                [ 10.,  11.,  12.,  13.,  14.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L253</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L254</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -750,6 +750,252 @@ The output of this function.</p>
 </table>
 </dd></dl>
 <dl class="function">
+<dt id="mxnet.ndarray.contrib.bipartite_matching">
+<code class="descclassname">mxnet.ndarray.contrib.</code><code class="descname">bipartite_matching</code><span class="sig-paren">(</span><em>data=None</em>, <em>is_ascend=_Null</em>, <em>threshold=_Null</em>, <em>topk=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarray.contrib.bipartite_matching" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="docutils">
+<dt>Compute bipartite matching.</dt>
+<dd><p class="first">The matching is performed on score matrix with shape [B, N, M]
+- B: batch_size
+- N: number of rows to match
+- M: number of columns as reference to be matched against.</p>
+<p>Returns:
+x : matched column indices; -1 indicates a non-matched element in that row.
+y : matched row indices.</p>
+<p>Note:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>Zero gradients are back-propagated in this op for now.
+</pre></div>
+</div>
+<p>Example:</p>
+<div class="last highlight-python"><div class="highlight"><pre><span></span>s = [[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]]
+x, y = bipartite_matching(s, threshold=1e-12, is_ascend=False)
+x = [1, -1, 0]
+y = [2, 0]
+</pre></div>
+</div>
+</dd>
+</dl>
+<p>Defined in src/operator/contrib/bounding_box.cc:L169</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>data</strong> (<a class="reference internal" href="ndarray.html#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – The input</li>
+<li><strong>is_ascend</strong> (<em>boolean, optional, default=0</em>) – Use ascending order for scores instead of descending. Please set threshold accordingly.</li>
+<li><strong>threshold</strong> (<em>float, required</em>) – Ignore matching when score < thresh, if is_ascend=false, or ignore score > thresh, if is_ascend=true.</li>
+<li><strong>topk</strong> (<em>int, optional, default='-1'</em>) – Limit the number of matches to topk, set -1 for no limit</li>
+<li><strong>out</strong> (<em>NDArray, optional</em>) – The output NDArray to hold the result.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>out</strong> –
+The output of this function.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArrays</p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
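+<p>A minimal sketch of calling the operator on an NDArray version of the score matrix from the example
+above; the variable names and the 2-D input shape are assumptions taken from that example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+
+# Rows are candidates, columns are references to be matched against.
+s = mx.nd.array([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]])
+x, y = mx.nd.contrib.bipartite_matching(s, is_ascend=False, threshold=1e-12)
+print(x.asnumpy())  # matched column index per row, -1 where unmatched
+print(y.asnumpy())  # matched row index per column
+</pre></div>
+</div>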
+<dl class="function">
+<dt id="mxnet.ndarray.contrib.box_iou">
+<code class="descclassname">mxnet.ndarray.contrib.</code><code class="descname">box_iou</code><span class="sig-paren">(</span><em>lhs=None</em>, <em>rhs=None</em>, <em>format=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarray.contrib.box_iou" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="docutils">
+<dt>Bounding box overlap of two arrays.</dt>
+<dd><p class="first">The overlap is defined as Intersection-over-Union, aka, IOU.
+- lhs: (a_1, a_2, ..., a_n, 4) array
+- rhs: (b_1, b_2, ..., b_n, 4) array
+- output: (a_1, a_2, ..., a_n, b_1, b_2, ..., b_n) array</p>
+<p>Note:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>Zero gradients are back-propagated in this op for now.
+</pre></div>
+</div>
+<p>Example:</p>
+<div class="last highlight-python"><div class="highlight"><pre><span></span><span class="n">x</span> <span class="o">=</span> <span class="p">[[</span><span class="mf">0.5</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">,</span> <span class="mf">1.0</span><span class="p">,</span> <span class="mf">1.0</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.0</span><span class="p">,</span> <span class="mf">0.0</span><span class="p">,</span> <spa [...]
+<span class="n">y</span> <span class="o">=</span> <span class="p">[</span><span class="mf">0.25</span><span class="p">,</span> <span class="mf">0.25</span><span class="p">,</span> <span class="mf">0.75</span><span class="p">,</span> <span class="mf">0.75</span><span class="p">]</span>
+<span class="n">box_iou</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">,</span> <span class="n">format</span><span class="o">=</span><span class="s1">'corner'</span><span class="p">)</span> <span class="o">=</span> <span class="p">[[</span><span class="mf">0.1428</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.1428</span><span class="p">]]</span>
+</pre></div>
+</div>
+</dd>
+</dl>
+<p>Defined in src/operator/contrib/bounding_box.cc:L123</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>lhs</strong> (<a class="reference internal" href="ndarray.html#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – The first input</li>
+<li><strong>rhs</strong> (<a class="reference internal" href="ndarray.html#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – The second input</li>
+<li><strong>format</strong> (<em>{'center', 'corner'},optional, default='corner'</em>) – The box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out</strong> (<em>NDArray, optional</em>) – The output NDArray to hold the result.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>out</strong> –
+The output of this function.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArrays</p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
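+<p>A minimal sketch with NDArray inputs, reusing the corner-format boxes from the example above;
+the variable names are illustrative only:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+
+# Two boxes in corner format [xmin, ymin, xmax, ymax] ...
+lhs = mx.nd.array([[0.5, 0.5, 1.0, 1.0], [0.0, 0.0, 0.5, 0.5]])
+# ... compared against one reference box.
+rhs = mx.nd.array([[0.25, 0.25, 0.75, 0.75]])
+iou = mx.nd.contrib.box_iou(lhs, rhs, format='corner')
+print(iou.asnumpy())  # approximately [[0.1428], [0.1428]]
+</pre></div>
+</div>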
+<dl class="function">
+<dt id="mxnet.ndarray.contrib.box_nms">
+<code class="descclassname">mxnet.ndarray.contrib.</code><code class="descname">box_nms</code><span class="sig-paren">(</span><em>data=None</em>, <em>overlap_thresh=_Null</em>, <em>topk=_Null</em>, <em>coord_start=_Null</em>, <em>score_index=_Null</em>, <em>id_index=_Null</em>, <em>force_suppress=_Null</em>, <em>in_format=_Null</em>, <em>out_format=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarra [...]
+<dd><p>Apply non-maximum suppression to input.</p>
+<p>The output will be sorted in descending order according to <cite>score</cite>. Boxes with
+overlaps larger than <cite>overlap_thresh</cite> and smaller scores will be removed and
+filled with -1; the corresponding positions are recorded for backward propagation.</p>
+<p>During back-propagation, the gradient will be copied to the original
+position according to the input index. For positions that have been suppressed,
+the in_grad will be assigned 0.
+In summary, gradients stick to their boxes and are either moved or discarded
+according to their original index in the input.</p>
+<p>Input requirements:
+1. The input tensor must have at least 2 dimensions, (n, k); any higher dims are regarded
+as batch, e.g. (a, b, c, d, n, k) == (a*b*c*d, n, k)
+2. n is the number of boxes in each batch
+3. k is the width of each box item.</p>
+<p>By default, a box is [id, score, xmin, ymin, xmax, ymax, ...],
+additional elements are allowed.
+- <cite>id_index</cite>: optional, use -1 to ignore, useful if <cite>force_suppress=False</cite>, which means
+highly overlapping boxes are not suppressed when one is <cite>apple</cite> while the other is <cite>car</cite>.
+- <cite>coord_start</cite>: required, default=2, the starting index of the 4 coordinates.
+Two formats are supported:</p>
+<blockquote>
+<div><cite>corner</cite>: [xmin, ymin, xmax, ymax]
+<cite>center</cite>: [x, y, width, height]</div></blockquote>
+<ul class="simple">
+<li><cite>score_index</cite>: required, default=1, box score/confidence.</li>
+</ul>
+<p>When two boxes overlap with IOU > <cite>overlap_thresh</cite>, the one with the smaller score will be suppressed.
+- <cite>in_format</cite> and <cite>out_format</cite>: default=’corner’, specify in/out box formats.</p>
+<p>Examples:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>x = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
+     [0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
+box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1, id_index=0,
+    force_suppress=True, in_format='corner', out_format='corner') =
+    [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
+     [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
+out_grad = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2, 0.2, 0.2],
+            [0.3, 0.3, 0.3, 0.3, 0.3, 0.3], [0.4, 0.4, 0.4, 0.4, 0.4, 0.4]]
+# exe.backward
+in_grad = [[0.2, 0.2, 0.2, 0.2, 0.2, 0.2], [0, 0, 0, 0, 0, 0],
+           [0, 0, 0, 0, 0, 0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]
+</pre></div>
+</div>
+<p>Defined in src/operator/contrib/bounding_box.cc:L82</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>data</strong> (<a class="reference internal" href="ndarray.html#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – The input</li>
+<li><strong>overlap_thresh</strong> (<em>float, optional, default=0.5</em>) – Overlapping(IoU) threshold to suppress object with smaller score.</li>
+<li><strong>topk</strong> (<em>int, optional, default='-1'</em>) – Apply nms to topk boxes with descending scores, -1 to no restriction.</li>
+<li><strong>coord_start</strong> (<em>int, optional, default='2'</em>) – Start index of the consecutive 4 coordinates.</li>
+<li><strong>score_index</strong> (<em>int, optional, default='1'</em>) – Index of the scores/confidence of boxes.</li>
+<li><strong>id_index</strong> (<em>int, optional, default='-1'</em>) – Optional, index of the class categories, -1 to disable.</li>
+<li><strong>force_suppress</strong> (<em>boolean, optional, default=0</em>) – Optional, if set false and id_index is provided, nms will only apply to boxes belonging to the same category.</li>
+<li><strong>in_format</strong> (<em>{'center', 'corner'},optional, default='corner'</em>) – The input box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out_format</strong> (<em>{'center', 'corner'},optional, default='corner'</em>) – The output box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out</strong> (<em>NDArray, optional</em>) – The output NDArray to hold the result.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>out</strong> –
+The output of this function.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArrays</p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
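+<p>A minimal sketch of running non-maximum suppression on the example boxes above; the parameter values
+mirror the documented example and the variable names are illustrative only:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+
+# Each row is [id, score, xmin, ymin, xmax, ymax].
+boxes = mx.nd.array([[0, 0.5, 0.1, 0.1, 0.2, 0.2],
+                     [1, 0.4, 0.1, 0.1, 0.2, 0.2],
+                     [0, 0.3, 0.1, 0.1, 0.14, 0.14],
+                     [2, 0.6, 0.5, 0.5, 0.7, 0.8]])
+kept = mx.nd.contrib.box_nms(boxes, overlap_thresh=0.1, coord_start=2,
+                             score_index=1, id_index=0, force_suppress=True,
+                             in_format='corner', out_format='corner')
+print(kept.asnumpy())  # suppressed rows are filled with -1
+</pre></div>
+</div>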
+<dl class="function">
+<dt id="mxnet.ndarray.contrib.box_non_maximum_suppression">
+<code class="descclassname">mxnet.ndarray.contrib.</code><code class="descname">box_non_maximum_suppression</code><span class="sig-paren">(</span><em>data=None</em>, <em>overlap_thresh=_Null</em>, <em>topk=_Null</em>, <em>coord_start=_Null</em>, <em>score_index=_Null</em>, <em>id_index=_Null</em>, <em>force_suppress=_Null</em>, <em>in_format=_Null</em>, <em>out_format=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" [...]
+<dd><p>Apply non-maximum suppression to input.</p>
+<p>The output will be sorted in descending order according to <cite>score</cite>. Boxes with
+overlaps larger than <cite>overlap_thresh</cite> and smaller scores will be removed and
+filled with -1; the corresponding positions are recorded for backward propagation.</p>
+<p>During back-propagation, the gradient will be copied to the original
+position according to the input index. For positions that have been suppressed,
+the in_grad will be assigned 0.
+In summary, gradients stick to their boxes and are either moved or discarded
+according to their original index in the input.</p>
+<p>Input requirements:
+1. The input tensor must have at least 2 dimensions, (n, k); any higher dims are regarded
+as batch, e.g. (a, b, c, d, n, k) == (a*b*c*d, n, k)
+2. n is the number of boxes in each batch
+3. k is the width of each box item.</p>
+<p>By default, a box is [id, score, xmin, ymin, xmax, ymax, ...],
+additional elements are allowed.
+- <cite>id_index</cite>: optional, use -1 to ignore, useful if <cite>force_suppress=False</cite>, which means
+highly overlapping boxes are not suppressed when one is <cite>apple</cite> while the other is <cite>car</cite>.
+- <cite>coord_start</cite>: required, default=2, the starting index of the 4 coordinates.
+Two formats are supported:</p>
+<blockquote>
+<div><cite>corner</cite>: [xmin, ymin, xmax, ymax]
+<cite>center</cite>: [x, y, width, height]</div></blockquote>
+<ul class="simple">
+<li><cite>score_index</cite>: required, default=1, box score/confidence.</li>
+</ul>
+<p>When two boxes overlap with IOU > <cite>overlap_thresh</cite>, the one with the smaller score will be suppressed.
+- <cite>in_format</cite> and <cite>out_format</cite>: default=’corner’, specify in/out box formats.</p>
+<p>Examples:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>x = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
+     [0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
+box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1, id_index=0,
+    force_suppress=True, in_format='corner', out_format='corner') =
+    [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
+     [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
+out_grad = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2, 0.2, 0.2],
+            [0.3, 0.3, 0.3, 0.3, 0.3, 0.3], [0.4, 0.4, 0.4, 0.4, 0.4, 0.4]]
+# exe.backward
+in_grad = [[0.2, 0.2, 0.2, 0.2, 0.2, 0.2], [0, 0, 0, 0, 0, 0],
+           [0, 0, 0, 0, 0, 0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]
+</pre></div>
+</div>
+<p>Defined in src/operator/contrib/bounding_box.cc:L82</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>data</strong> (<a class="reference internal" href="ndarray.html#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – The input</li>
+<li><strong>overlap_thresh</strong> (<em>float, optional, default=0.5</em>) – Overlapping(IoU) threshold to suppress object with smaller score.</li>
+<li><strong>topk</strong> (<em>int, optional, default='-1'</em>) – Apply nms to topk boxes with descending scores, -1 to no restriction.</li>
+<li><strong>coord_start</strong> (<em>int, optional, default='2'</em>) – Start index of the consecutive 4 coordinates.</li>
+<li><strong>score_index</strong> (<em>int, optional, default='1'</em>) – Index of the scores/confidence of boxes.</li>
+<li><strong>id_index</strong> (<em>int, optional, default='-1'</em>) – Optional, index of the class categories, -1 to disable.</li>
+<li><strong>force_suppress</strong> (<em>boolean, optional, default=0</em>) – Optional, if set false and id_index is provided, nms will only apply to boxes belonging to the same category.</li>
+<li><strong>in_format</strong> (<em>{'center', 'corner'},optional, default='corner'</em>) – The input box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out_format</strong> (<em>{'center', 'corner'},optional, default='corner'</em>) – The output box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out</strong> (<em>NDArray, optional</em>) – The output NDArray to hold the result.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>out</strong> –
+The output of this function.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArrays</p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
 <dt id="mxnet.ndarray.contrib.count_sketch">
 <code class="descclassname">mxnet.ndarray.contrib.</code><code class="descname">count_sketch</code><span class="sig-paren">(</span><em>data=None</em>, <em>h=None</em>, <em>s=None</em>, <em>out_dim=_Null</em>, <em>processing_batch_size=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarray.contrib.count_sketch" title="Permalink to this definition">¶</a></dt>
 <dd><p>Apply CountSketch to input: map a d-dimension data to k-dimension data”</p>
diff --git a/versions/master/api/python/ndarray/linalg.html b/versions/master/api/python/ndarray/linalg.html
index acd9400..0bb1b3a 100644
--- a/versions/master/api/python/ndarray/linalg.html
+++ b/versions/master/api/python/ndarray/linalg.html
@@ -53,7 +53,7 @@
 <link href="random.html" rel="prev" title="Random Distribution Generator NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/ndarray/ndarray.html b/versions/master/api/python/ndarray/ndarray.html
index 4ab6a4f..da6d8f5 100644
--- a/versions/master/api/python/ndarray/ndarray.html
+++ b/versions/master/api/python/ndarray/ndarray.html
@@ -53,7 +53,7 @@
 <link href="../index.html" rel="prev" title="MXNet - Python API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -1464,7 +1464,7 @@ broadcasting enabled by default.</li>
 <td>Applies correlation to inputs.</td>
 </tr>
 <tr class="row-even"><td><a class="reference internal" href="#mxnet.ndarray.Deconvolution" title="mxnet.ndarray.Deconvolution"><code class="xref py py-obj docutils literal"><span class="pre">Deconvolution</span></code></a></td>
-<td>Computes 2D transposed convolution (aka fractionally strided convolution) of the input tensor.</td>
+<td>Computes 1D or 2D transposed convolution (aka fractionally strided convolution) of the input tensor.</td>
 </tr>
 <tr class="row-odd"><td><a class="reference internal" href="#mxnet.ndarray.RNN" title="mxnet.ndarray.RNN"><code class="xref py py-obj docutils literal"><span class="pre">RNN</span></code></a></td>
 <td>Applies a recurrent layer to input.</td>
@@ -3172,10 +3172,10 @@ the performance.</li>
 <li><strong>data</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Input data to the ConvolutionOp.</li>
 <li><strong>weight</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Weight matrix.</li>
 <li><strong>bias</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Bias parameter.</li>
-<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Convolution kernel size: (h, w) or (d, h, w)</li>
-<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution stride: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution dilate: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – Zero pad for convolution: (h, w) or (d, h, w). Defaults to no padding.</li>
+<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Convolution kernel size: (w,), (h, w) or (d, h, w)</li>
+<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution stride: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution dilate: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – Zero pad for convolution: (w,), (h, w) or (d, h, w). Defaults to no padding.</li>
 <li><strong>num_filter</strong> (<em>int (non-negative), required</em>) – Convolution filter(channel) number</li>
 <li><strong>num_group</strong> (<em>int (non-negative), optional, default=1</em>) – Number of group partitions.</li>
 <li><strong>workspace</strong> (<em>long (non-negative), optional, default=1024</em>) – Maximum temporary workspace allowed for convolution (MB).</li>
@@ -3365,7 +3365,7 @@ The output of this function.</p>
 <dl class="function">
 <dt id="mxnet.ndarray.Deconvolution">
 <code class="descclassname">mxnet.ndarray.</code><code class="descname">Deconvolution</code><span class="sig-paren">(</span><em>data=None</em>, <em>weight=None</em>, <em>bias=None</em>, <em>kernel=_Null</em>, <em>stride=_Null</em>, <em>dilate=_Null</em>, <em>pad=_Null</em>, <em>adj=_Null</em>, <em>target_shape=_Null</em>, <em>num_filter=_Null</em>, <em>num_group=_Null</em>, <em>workspace=_Null</em>, <em>no_bias=_Null</em>, <em>cudnn_tune=_Null</em>, <em>cudnn_off=_Null</em>, <em>layout=_ [...]
-<dd><p>Computes 2D transposed convolution (aka fractionally strided convolution) of the input tensor. This operation can be seen as the gradient of Convolution operation with respect to its input. Convolution usually reduces the size of the input. Transposed convolution works the other way, going from a smaller input to a larger output while preserving the connectivity pattern.</p>
+<dd><p>Computes 1D or 2D transposed convolution (aka fractionally strided convolution) of the input tensor. This operation can be seen as the gradient of Convolution operation with respect to its input. Convolution usually reduces the size of the input. Transposed convolution works the other way, going from a smaller input to a larger output while preserving the connectivity pattern.</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3374,12 +3374,12 @@ The output of this function.</p>
 <li><strong>data</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Input tensor to the deconvolution operation.</li>
 <li><strong>weight</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Weights representing the kernel.</li>
 <li><strong>bias</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Bias added to the result after the deconvolution operation.</li>
-<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Deconvolution kernel size: (h, w) or (d, h, w). This is same as the kernel size used for the corresponding convolution</li>
-<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – The stride used for the corresponding convolution: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Dilation factor for each dimension of the input: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – The amount of implicit zero padding added during convolution for each dimension of the input: (h, w) or (d, h, w). <code class="docutils literal"><span class="pre">(kernel-1)/2</span></code> is usually a good choice. If <cite>target_shape</cite> is set, <cite>pad</cite> will be ignored and a padding that will generate the target shape will be used. Defaults to no padding.</li>
-<li><strong>adj</strong> (<em>Shape(tuple), optional, default=[]</em>) – Adjustment for output shape: (h, w) or (d, h, w). If <cite>target_shape</cite> is set, <cite>adj</cite> will be ignored and computed accordingly.</li>
-<li><strong>target_shape</strong> (<em>Shape(tuple), optional, default=[]</em>) – Shape of the output tensor: (h, w) or (d, h, w).</li>
+<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Deconvolution kernel size: (w,), (h, w) or (d, h, w). This is same as the kernel size used for the corresponding convolution</li>
+<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – The stride used for the corresponding convolution: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Dilation factor for each dimension of the input: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – The amount of implicit zero padding added during convolution for each dimension of the input: (w,), (h, w) or (d, h, w). <code class="docutils literal"><span class="pre">(kernel-1)/2</span></code> is usually a good choice. If <cite>target_shape</cite> is set, <cite>pad</cite> will be ignored and a padding that will generate the target shape will be used. Defaults to no padding.</li>
+<li><strong>adj</strong> (<em>Shape(tuple), optional, default=[]</em>) – Adjustment for output shape: (w,), (h, w) or (d, h, w). If <cite>target_shape</cite> is set, <cite>adj</cite> will be ignored and computed accordingly.</li>
+<li><strong>target_shape</strong> (<em>Shape(tuple), optional, default=[]</em>) – Shape of the output tensor: (w,), (h, w) or (d, h, w).</li>
 <li><strong>num_filter</strong> (<em>int (non-negative), required</em>) – Number of output filters.</li>
 <li><strong>num_group</strong> (<em>int (non-negative), optional, default=1</em>) – Number of groups partition.</li>
 <li><strong>workspace</strong> (<em>long (non-negative), optional, default=512</em>) – Maximum temporal workspace allowed for deconvolution (MB).</li>
@@ -3526,7 +3526,7 @@ Embedding(x, y, 4, 5) = [[[  5.,   6.,   7.,   8.,   9.],
                           [ 10.,  11.,  12.,  13.,  14.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L184</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L185</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3576,7 +3576,7 @@ the input array into an output array of shape <code class="docutils literal"><sp
    <span class="p">[</span> <span class="mf">1.</span><span class="p">,</span>  <span class="mf">2.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">9.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L150</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -4482,7 +4482,7 @@ At most one dimension of shape can be -1.</p>
 </pre></div>
 </div>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L106</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L164</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -5253,7 +5253,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, m a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">m</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">/</span> <span class="p">(</span><span class="n">sqrt</span><span class="p">(</span><span class="n">v</span><span class="p">[</span><span class="n">row</span><sp [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L175</p>
+<p>Defined in src/operator/optimizer_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -5688,7 +5688,7 @@ which is computed by:</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">batch_dot</span><span class="p">(</span><span class="n">x</span><span class="p">,</span><span class="n">y</span><span class="p">)[</span><span class="n">i</span><span class="p">,:,:]</span> <span class="o">=</span> <span class="n">dot</span><span class="p">(</span><span class="n">x</span><span class="p">[</span><span class="n">i</span><span class="p">,:,:],</span> <span class="n">y</span><span class="p [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/dot.cc:L109</p>
+<p>Defined in src/operator/tensor/dot.cc:L110</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -5734,7 +5734,7 @@ an output array of shape <code class="docutils literal"><span class="pre">(i0,)<
 batch_take(x, [0,1,0]) = [ 1.  4.  5.]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L381</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L382</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -6640,7 +6640,7 @@ The output of this function.</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">cbrt</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">8</span><span class="p">,</span> <span class="o">-</span><span class="mi">125</span><span class="p">])</span> <span class="o">=</span> <span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="o">-</span><span class="mi">5< [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L597</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L602</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -6677,7 +6677,7 @@ The output of this function.</p>
 <li>ceil(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L463</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L464</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -6750,7 +6750,7 @@ Clipping <code class="docutils literal"><span class="pre">x</span></code> betwee
 <li>clip(csr, a_min > 0, a_max > 0) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L424</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L486</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -6933,7 +6933,7 @@ a dense tensor.</p>
                                                           <span class="p">[</span><span class="mf">1.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L297</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L355</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7022,9 +7022,10 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>dot(csr, default) = default</li>
 <li>dot(csr.T, default) = row_sparse</li>
 <li>dot(csr, row_sparse) = default</li>
+<li>dot(default, csr) = csr</li>
 <li>otherwise, <code class="docutils literal"><span class="pre">dot</span></code> generates output with default storage</li>
 </ul>
-<p>Defined in src/operator/tensor/dot.cc:L61</p>
+<p>Defined in src/operator/tensor/dot.cc:L62</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7194,7 +7195,7 @@ The output of this function.</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">exp</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L637</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L642</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7221,7 +7222,7 @@ The output of this function.</p>
 <dd><p>Inserts a new axis of size 1 into the array shape</p>
 <p>For example, given <code class="docutils literal"><span class="pre">x</span></code> with shape <code class="docutils literal"><span class="pre">(2,3,4)</span></code>, then <code class="docutils literal"><span class="pre">expand_dims(x,</span> <span class="pre">axis=1)</span></code>
 will return a new array with shape <code class="docutils literal"><span class="pre">(2,1,3,4)</span></code>.</p>
-<p>Defined in src/operator/tensor/matrix_op.cc:L231</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L289</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7255,7 +7256,7 @@ The output of this function.</p>
 <li>expm1(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L716</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L721</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7317,7 +7318,7 @@ The output of this function.</p>
 <li>fix(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L517</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L521</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7363,7 +7364,7 @@ the input array into an output array of shape <code class="docutils literal"><sp
    <span class="p">[</span> <span class="mf">1.</span><span class="p">,</span>  <span class="mf">2.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">9.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L150</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7400,7 +7401,7 @@ The output of this function.</p>
                       <span class="p">[</span> <span class="mf">9.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L600</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L662</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7438,7 +7439,7 @@ The output of this function.</p>
 <li>floor(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L481</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L483</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7460,6 +7461,51 @@ The output of this function.</p>
 </table>
 </dd></dl>
 <dl class="function">
+<dt id="mxnet.ndarray.ftml_update">
+<code class="descclassname">mxnet.ndarray.</code><code class="descname">ftml_update</code><span class="sig-paren">(</span><em>weight=None</em>, <em>grad=None</em>, <em>d=None</em>, <em>v=None</em>, <em>z=None</em>, <em>lr=_Null</em>, <em>beta1=_Null</em>, <em>beta2=_Null</em>, <em>epsilon=_Null</em>, <em>wd=_Null</em>, <em>rescale_grad=_Null</em>, <em>clip_gradient=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" hr [...]
+<dd><p>The FTML optimizer described in
+<em>FTML - Follow the Moving Leader in Deep Learning</em>,
+available at <a class="reference external" href="http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf">http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf</a>.</p>
+<div class="math">
+\[\begin{split}g_t = \nabla J(W_{t-1})\\
+v_t = \beta_2 v_{t-1} + (1 - \beta_2) g_t^2\\
+d_t = \frac{ (1 - \beta_1^t) }{ \eta_t } (\sqrt{ \frac{ v_t }{ 1 - \beta_2^t } } + \epsilon)\\
+\sigma_t = d_t - \beta_1 d_{t-1}\\
+z_t = \beta_1 z_{ t-1 } + (1 - \beta_1^t) g_t - \sigma_t W_{t-1}\\
+W_t = - \frac{ z_t }{ d_t }\end{split}\]</div>
+<p>Defined in src/operator/optimizer_op.cc:L161</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>weight</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Weight</li>
+<li><strong>grad</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Gradient</li>
+<li><strong>d</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Internal state <code class="docutils literal"><span class="pre">d_t</span></code></li>
+<li><strong>v</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Internal state <code class="docutils literal"><span class="pre">v_t</span></code></li>
+<li><strong>z</strong> (<a class="reference internal" href="#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Internal state <code class="docutils literal"><span class="pre">z_t</span></code></li>
+<li><strong>lr</strong> (<em>float, required</em>) – Learning rate</li>
+<li><strong>beta1</strong> (<em>float, optional, default=0.9</em>) – The decay rate for the 1st moment estimates.</li>
+<li><strong>beta2</strong> (<em>float, optional, default=0.999</em>) – The decay rate for the 2nd moment estimates.</li>
+<li><strong>epsilon</strong> (<em>float, optional, default=1e-08</em>) – A small constant for numerical stability.</li>
+<li><strong>wd</strong> (<em>float, optional, default=0</em>) – Weight decay augments the objective function with a regularization term that penalizes large weights. The penalty scales with the square of the magnitude of each weight.</li>
+<li><strong>rescale_grad</strong> (<em>float, optional, default=1</em>) – Rescale gradient to grad = rescale_grad*grad.</li>
+<li><strong>clip_gradient</strong> (<em>float, optional, default=-1</em>) – Clip gradient to the range of [-clip_gradient, clip_gradient] If clip_gradient <= 0, gradient clipping is turned off. grad = max(min(grad, clip_gradient), -clip_gradient).</li>
+<li><strong>out</strong> (<em>NDArray, optional</em>) – The output NDArray to hold the result.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>out</strong> –
+The output of this function.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArrays</p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
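+<p>As an illustrative sketch only (not part of the generated operator documentation), a single
+in-place update step might look like the following, using just the parameters listed above and
+arbitrary example values:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+
+weight = mx.nd.ones((3,))               # parameters being optimized
+grad   = mx.nd.array([0.1, -0.2, 0.3])  # gradient for this step
+d = mx.nd.zeros((3,))                   # internal state d_t
+v = mx.nd.zeros((3,))                   # internal state v_t
+z = mx.nd.zeros((3,))                   # internal state z_t
+
+# one FTML step; `out=weight` writes the updated weights in place
+mx.nd.ftml_update(weight, grad, d, v, z, lr=0.01,
+                  beta1=0.9, beta2=0.999, epsilon=1e-8, out=weight)
+</pre></div>
+</div>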
+<dl class="function">
 <dt id="mxnet.ndarray.ftrl_update">
 <code class="descclassname">mxnet.ndarray.</code><code class="descname">ftrl_update</code><span class="sig-paren">(</span><em>weight=None</em>, <em>grad=None</em>, <em>z=None</em>, <em>n=None</em>, <em>lr=_Null</em>, <em>lamda1=_Null</em>, <em>beta=_Null</em>, <em>wd=_Null</em>, <em>rescale_grad=_Null</em>, <em>clip_gradient=_Null</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarray.ftrl_update" title="Pe [...]
 <dd><p>Update function for Ftrl optimizer.
@@ -7481,7 +7527,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="p">(</span><span class="n">sign</span><span class="p">(</span><span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span class="o">*</span> <span class="n">lamda1</span> <span class="o">-</span> <span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span cla [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L308</p>
+<p>Defined in src/operator/optimizer_op.cc:L341</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7630,6 +7676,56 @@ The output of this function.</p>
 </table>
 </dd></dl>
 <dl class="function">
+<dt id="mxnet.ndarray.khatri_rao">
+<code class="descclassname">mxnet.ndarray.</code><code class="descname">khatri_rao</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarray.khatri_rao" title="Permalink to this definition">¶</a></dt>
+<dd><p>Computes the Khatri-Rao product of the input matrices.</p>
+<p>Given a collection of <span class="math">\(n\)</span> input matrices,</p>
+<div class="math">
+\[A_1 \in \mathbb{R}^{M_1 \times N}, \ldots, A_n \in \mathbb{R}^{M_n \times N},\]</div>
+<p>the (column-wise) Khatri-Rao product is defined as the matrix,</p>
+<div class="math">
+\[X = A_1 \otimes \cdots \otimes A_n \in \mathbb{R}^{(M_1 \cdots M_n) \times N},\]</div>
+<p>where the <span class="math">\(k\)</span>-th column is equal to the column-wise outer product
+<span class="math">\({A_1}_k \otimes \cdots \otimes {A_n}_k\)</span>, where <span class="math">\({A_i}_k\)</span> is the
+<span class="math">\(k\)</span>-th column of the <span class="math">\(i\)</span>-th matrix.</p>
+<p>Example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span><span class="gp">>>> </span><span class="n">A</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">],</span>
+<span class="gp">>>> </span>                 <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="o">-</span><span class="mi">3</span><span class="p">]])</span>
+<span class="gp">>>> </span><span class="n">B</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">4</span><span class="p">],</span>
+<span class="gp">>>> </span>                 <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="mi">5</span><span class="p">],</span>
+<span class="gp">>>> </span>                 <span class="p">[</span><span class="mi">3</span><span class="p">,</span> <span class="mi">6</span><span class="p">]])</span>
+<span class="gp">>>> </span><span class="n">C</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">khatri_rao</span><span class="p">(</span><span class="n">A</span><span class="p">,</span> <span class="n">B</span><span class="p">)</span>
+<span class="gp">>>> </span><span class="k">print</span><span class="p">(</span><span class="n">C</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">())</span>
+<span class="go">[[  1.  -4.]</span>
+<span class="go"> [  2.  -5.]</span>
+<span class="go"> [  3.  -6.]</span>
+<span class="go"> [  2. -12.]</span>
+<span class="go"> [  4. -15.]</span>
+<span class="go"> [  6. -18.]]</span>
+</pre></div>
+</div>
+<p>Defined in src/operator/contrib/krprod.cc:L108</p>
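+<p>In the example above, each column of C is the Kronecker product of the corresponding columns of
+A and B: the first column is [1, 2] ⊗ [1, 2, 3] = [1, 2, 3, 2, 4, 6], and the second is
+[-1, -3] ⊗ [4, 5, 6] = [-4, -5, -6, -12, -15, -18].</p>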
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>args</strong> (<em>NDArray[]</em>) – Positional input matrices</li>
+<li><strong>out</strong> (<em>NDArray, optional</em>) – The output NDArray to hold the result.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first"><strong>out</strong> –
+The output of this function.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArrays</p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
 <dt id="mxnet.ndarray.linalg_gelqf">
 <code class="descclassname">mxnet.ndarray.</code><code class="descname">linalg_gelqf</code><span class="sig-paren">(</span><em>A=None</em>, <em>out=None</em>, <em>name=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.ndarray.linalg_gelqf" title="Permalink to this definition">¶</a></dt>
 <dd><p>LQ factorization for general matrix.
@@ -8141,7 +8237,7 @@ The output of this function.</p>
 <dd><p>Returns element-wise Natural logarithmic value of the input.</p>
 <p>The natural logarithm is logarithm in base <em>e</em>, so that <code class="docutils literal"><span class="pre">log(exp(x))</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L649</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L654</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8168,7 +8264,7 @@ The output of this function.</p>
 <dd><p>Returns element-wise Base-10 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">10**log10(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log10</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L661</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L666</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8202,7 +8298,7 @@ The output of this function.</p>
 <li>log1p(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L698</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L703</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8229,7 +8325,7 @@ The output of this function.</p>
 <dd><p>Returns element-wise Base-2 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">2**log2(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log2</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L673</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L678</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8789,7 +8885,7 @@ one_hot([[1,0],[1,0],[2,0]], 3) = [[[ 0.  1.  0.]
                                     [ 1.  0.  0.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L427</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L428</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9350,7 +9446,7 @@ The output of this function.</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">rcbrt</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span><span class="mi">8</span><span class="p">,</span><span class="o">-</span><span class="mi">125</span><span class="p">])</span> <span class="o">=</span> <span class="p">[</span><span class="mf">1.0</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">,</span> <span class="o">-</span><span class="mf" [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L614</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L619</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9460,7 +9556,7 @@ elements:</p>
                                  <span class="p">[</span> <span class="mf">3.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L498</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L560</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9549,7 +9645,7 @@ At most one dimension of shape can be -1.</p>
 </pre></div>
 </div>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L106</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L164</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9615,7 +9711,7 @@ The output of this function.</p>
                       <span class="p">[</span> <span class="mf">9.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L600</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L662</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9706,7 +9802,7 @@ gradient and <span class="math">\(E[g^2]_t\)</span> is the decaying average over
 Tieleman &amp; Hinton, 2012.</p>
 <p>Hinton suggests the momentum term <span class="math">\(\gamma\)</span> to be 0.9 and the learning rate
 <span class="math">\(\eta\)</span> to be 0.001.</p>
-<p>Defined in src/operator/optimizer_op.cc:L229</p>
+<p>Defined in src/operator/optimizer_op.cc:L262</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9754,7 +9850,7 @@ E[g]_t = \gamma_1 * E[g]_{t-1} + (1 - \gamma_1) * g_t\\
 <a class="reference external" href="http://arxiv.org/pdf/1308.0850v5.pdf">http://arxiv.org/pdf/1308.0850v5.pdf</a> Eq(38) - Eq(45) by Alex Graves, 2013.</p>
 <p>Graves suggests the momentum term <span class="math">\(\gamma_1\)</span> to be 0.95, <span class="math">\(\gamma_2\)</span>
 to be 0.9 and the learning rate <span class="math">\(\eta\)</span> to be 0.0001.</p>
-<p>Defined in src/operator/optimizer_op.cc:L268</p>
+<p>Defined in src/operator/optimizer_op.cc:L301</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9834,7 +9930,7 @@ The output of this function.</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">rsqrt</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L580</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L585</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9880,7 +9976,7 @@ sample_exponential(lam, shape=(2)) = [[ 0.51837951,  0.19866663],
                                       [ 0.09994757,  0.50447971]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L283</p>
+<p>Defined in src/operator/random/multisample_op.cc:L284</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9929,7 +10025,7 @@ sample_gamma(alpha, beta, shape=(2)) = [[ 0.        ,  0.        ],
                                         [ 2.25797319,  1.70734084]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L281</p>
+<p>Defined in src/operator/random/multisample_op.cc:L282</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9980,7 +10076,7 @@ sample_generalized_negative_binomial(mu, alpha, shape=(2)) = [[ 0.,  3.],
                                                               [ 3.,  1.]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L292</p>
+<p>Defined in src/operator/random/multisample_op.cc:L293</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10082,7 +10178,7 @@ sample_negative_binomial(k, p, shape=(2)) = [[ 15.,  50.],
                                              [ 16.,  12.]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L288</p>
+<p>Defined in src/operator/random/multisample_op.cc:L289</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10132,7 +10228,7 @@ sample_normal(mu, sigma, shape=(2)) = [[-0.56410581,  0.2928229 ],
                                        [ 0.95934606,  4.48287058]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L278</p>
+<p>Defined in src/operator/random/multisample_op.cc:L279</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10182,7 +10278,7 @@ sample_poisson(lam, shape=(2)) = [[  0.,   4.],
                                   [ 13.,   8.]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L285</p>
+<p>Defined in src/operator/random/multisample_op.cc:L286</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10231,7 +10327,7 @@ sample_uniform(low, high, shape=(2)) = [[ 0.40451524,  0.18017688],
                                         [ 3.18687344,  3.68352246]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L276</p>
+<p>Defined in src/operator/random/multisample_op.cc:L277</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10323,7 +10419,7 @@ only the row slices whose indices appear in grad.indices are updated (for both w
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="n">v</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L93</p>
+<p>Defined in src/operator/optimizer_op.cc:L94</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10365,7 +10461,7 @@ only the row slices whose indices appear in grad.indices are updated:</p>
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">gradient</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L53</p>
+<p>Defined in src/operator/optimizer_op.cc:L54</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10571,7 +10667,7 @@ a dense tensor.</p>
                                                           <span class="p">[</span><span class="mf">1.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L297</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L355</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10618,7 +10714,7 @@ to the <cite>end</cite> index.</p>
                                            <span class="p">[</span> <span class="mf">10.</span><span class="p">,</span>  <span class="mf">11.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L380</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L442</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10924,7 +11020,7 @@ The output of this function.</p>
 <li>sqrt(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L560</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L565</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10963,7 +11059,7 @@ The output of this function.</p>
 <li>square(csr) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L537</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L542</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11099,7 +11195,7 @@ data = [[1,2,0],
 csr = cast_storage(data, 'csr')
 
 sum(csr, axis=0)
-[ 8.  2.  2.]
+[ 8.  3.  1.]
 
 sum(csr, axis=1)
 [ 3.  4.  5.]
@@ -11168,7 +11264,7 @@ data = [[1,2,0],
 csr = cast_storage(data, 'csr')
 
 sum(csr, axis=0)
-[ 8.  2.  2.]
+[ 8.  3.  1.]
 
 sum(csr, axis=1)
 [ 3.  4.  5.]
@@ -11281,7 +11377,7 @@ take(x, [[0,1],[1,2]]) = [[[ 1.,  2.],
                            [ 5.,  6.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L326</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L327</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11414,7 +11510,7 @@ shape <code class="docutils literal"><span class="pre">(2,2)</span></code> array
 </div>
 </li>
 </ul>
-<p>Defined in src/operator/tensor/matrix_op.cc:L559</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L621</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11520,7 +11616,7 @@ The output of this function.</p>
                                <span class="p">[</span> <span class="mf">7.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">]]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L195</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L253</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11559,7 +11655,7 @@ zero than x is. In short, the fractional part of the signed number x is discarde
 <li>trunc(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L500</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L503</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
diff --git a/versions/master/api/python/ndarray/random.html b/versions/master/api/python/ndarray/random.html
index ccb4f28..1107772 100644
--- a/versions/master/api/python/ndarray/random.html
+++ b/versions/master/api/python/ndarray/random.html
@@ -53,7 +53,7 @@
 <link href="ndarray.html" rel="prev" title="NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/ndarray/sparse.html b/versions/master/api/python/ndarray/sparse.html
index 787383c..c5e7636 100644
--- a/versions/master/api/python/ndarray/sparse.html
+++ b/versions/master/api/python/ndarray/sparse.html
@@ -53,7 +53,7 @@
 <link href="linalg.html" rel="prev" title="Linear Algebra NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -293,6 +293,10 @@ imperative sparse tensor operations on <strong>CPU</strong>.</p>
 <br/><br/></p>
 <div class="admonition note">
 <p class="first admonition-title">Note</p>
+<p class="last"><code class="docutils literal"><span class="pre">mxnet.ndarray.sparse.RowSparseNDArray</span></code> and <code class="docutils literal"><span class="pre">mxnet.ndarray.sparse.CSRNDArray</span></code> DO NOT support the <code class="docutils literal"><span class="pre">mxnet.gluon</span></code> high-level interface yet.</p>
+</div>
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
 <p><code class="docutils literal"><span class="pre">mxnet.ndarray.sparse</span></code> is similar to <code class="docutils literal"><span class="pre">mxnet.ndarray</span></code> in some aspects. But the differences are not negligible. For instance:</p>
 <ul class="last simple">
 <li>Only a subset of operators in <code class="docutils literal"><span class="pre">mxnet.ndarray</span></code> have specialized implementations in <code class="docutils literal"><span class="pre">mxnet.ndarray.sparse</span></code>.
@@ -2305,7 +2309,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, m a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">m</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">/</span> <span class="p">(</span><span class="n">sqrt</span><span class="p">(</span><span class="n">v</span><span class="p">[</span><span class="n">row</span><sp [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L175</p>
+<p>Defined in src/operator/optimizer_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2634,7 +2638,7 @@ The output of this function.</p>
 <li>ceil(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L463</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L464</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2682,7 +2686,7 @@ Clipping <code class="docutils literal"><span class="pre">x</span></code> betwee
 <li>clip(csr, a_min > 0, a_max > 0) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L424</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L486</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2827,9 +2831,10 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>dot(csr, default) = default</li>
 <li>dot(csr.T, default) = row_sparse</li>
 <li>dot(csr, row_sparse) = default</li>
+<li>dot(default, csr) = csr</li>
 <li>otherwise, <code class="docutils literal"><span class="pre">dot</span></code> generates output with default storage</li>
 </ul>
-<p>Defined in src/operator/tensor/dot.cc:L61</p>
+<p>Defined in src/operator/tensor/dot.cc:L62</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2992,7 +2997,7 @@ The output of this function.</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">exp</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L637</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L642</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3025,7 +3030,7 @@ The output of this function.</p>
 <li>expm1(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L716</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L721</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3061,7 +3066,7 @@ The output of this function.</p>
 <li>fix(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L517</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L521</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3098,7 +3103,7 @@ The output of this function.</p>
 <li>floor(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L481</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L483</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3141,7 +3146,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="p">(</span><span class="n">sign</span><span class="p">(</span><span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span class="o">*</span> <span class="n">lamda1</span> <span class="o">-</span> <span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span cla [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L308</p>
+<p>Defined in src/operator/optimizer_op.cc:L341</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3227,7 +3232,7 @@ The output of this function.</p>
 <dd><p>Returns element-wise Natural logarithmic value of the input.</p>
 <p>The natural logarithm is logarithm in base <em>e</em>, so that <code class="docutils literal"><span class="pre">log(exp(x))</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L649</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L654</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3254,7 +3259,7 @@ The output of this function.</p>
 <dd><p>Returns element-wise Base-10 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">10**log10(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log10</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L661</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L666</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3288,7 +3293,7 @@ The output of this function.</p>
 <li>log1p(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L698</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L703</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3315,7 +3320,7 @@ The output of this function.</p>
 <dd><p>Returns element-wise Base-2 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">2**log2(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log2</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L673</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L678</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3650,7 +3655,7 @@ The output of this function.</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">rsqrt</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L580</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L585</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3694,7 +3699,7 @@ only the row slices whose indices appear in grad.indices are updated (for both w
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="n">v</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L93</p>
+<p>Defined in src/operator/optimizer_op.cc:L94</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3736,7 +3741,7 @@ only the row slices whose indices appear in grad.indices are updated:</p>
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">gradient</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L53</p>
+<p>Defined in src/operator/optimizer_op.cc:L54</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3942,7 +3947,7 @@ a dense tensor.</p>
                                                           <span class="p">[</span><span class="mf">1.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L297</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L355</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3983,7 +3988,7 @@ The output of this function.</p>
 <li>sqrt(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L560</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L565</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -4022,7 +4027,7 @@ The output of this function.</p>
 <li>square(csr) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L537</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L542</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -4120,7 +4125,7 @@ data = [[1,2,0],
 csr = cast_storage(data, 'csr')
 
 sum(csr, axis=0)
-[ 8.  2.  2.]
+[ 8.  3.  1.]
 
 sum(csr, axis=1)
 [ 3.  4.  5.]
@@ -4245,7 +4250,7 @@ zero than x is. In short, the fractional part of the signed number x is discarde
 <li>trunc(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L500</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L503</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
diff --git a/versions/master/api/python/optimization/optimization.html b/versions/master/api/python/optimization/optimization.html
index 478d747..8e9cda0 100644
--- a/versions/master/api/python/optimization/optimization.html
+++ b/versions/master/api/python/optimization/optimization.html
@@ -53,7 +53,7 @@
 <link href="../image/image.html" rel="prev" title="Image API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -742,6 +742,29 @@ by <a class="reference internal" href="#mxnet.optimizer.Optimizer" title="mxnet.
 </table>
 </dd></dl>
 <dl class="class">
+<dt id="mxnet.optimizer.FTML">
+<em class="property">class </em><code class="descclassname">mxnet.optimizer.</code><code class="descname">FTML</code><span class="sig-paren">(</span><em>beta1=0.6</em>, <em>beta2=0.999</em>, <em>epsilon=1e-08</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/mxnet/optimizer.html#FTML"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.optimizer.FTML" title="Permalink to this definition">¶</a></dt>
+<dd><p>The FTML optimizer.</p>
+<p>This class implements the optimizer described in
+<em>FTML - Follow the Moving Leader in Deep Learning</em>,
+available at <a class="reference external" href="http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf">http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf</a>.</p>
+<p>This optimizer accepts the following parameters in addition to those accepted
+by <a class="reference internal" href="#mxnet.optimizer.Optimizer" title="mxnet.optimizer.Optimizer"><code class="xref py py-class docutils literal"><span class="pre">Optimizer</span></code></a>.</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
+<li><strong>beta1</strong> (<em>float, optional</em>) – 0 < beta1 < 1. Generally close to 0.5.</li>
+<li><strong>beta2</strong> (<em>float, optional</em>) – 0 < beta2 < 1. Generally close to 1.</li>
+<li><strong>epsilon</strong> (<em>float, optional</em>) – Small value to avoid division by 0.</li>
+</ul>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
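+<p>A minimal usage sketch (not part of the generated documentation; the single
+<code class="docutils literal"><span class="pre">Dense</span></code> layer below is only a placeholder model):</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+from mxnet import gluon
+
+net = gluon.nn.Dense(1)        # placeholder network
+net.initialize()
+
+# construct the optimizer directly and hand it to a Gluon trainer
+opt = mx.optimizer.FTML(beta1=0.6, beta2=0.999, epsilon=1e-8, learning_rate=0.01)
+trainer = gluon.Trainer(net.collect_params(), opt)
+</pre></div>
+</div>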
+<dl class="class">
 <dt id="mxnet.optimizer.DCASGD">
 <em class="property">class </em><code class="descclassname">mxnet.optimizer.</code><code class="descname">DCASGD</code><span class="sig-paren">(</span><em>momentum=0.0</em>, <em>lamda=0.04</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/mxnet/optimizer.html#DCASGD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.optimizer.DCASGD" title="Permalink to this definition">¶</a></dt>
 <dd><p>The DCASGD optimizer.</p>
diff --git a/versions/master/api/python/rtc/rtc.html b/versions/master/api/python/rtc/rtc.html
index 5103e58..702f99c 100644
--- a/versions/master/api/python/rtc/rtc.html
+++ b/versions/master/api/python/rtc/rtc.html
@@ -53,7 +53,7 @@
 <link href="../metric/metric.html" rel="prev" title="Evaluation Metric API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/symbol/contrib.html b/versions/master/api/python/symbol/contrib.html
index 47bbaf8..b8ae460 100644
--- a/versions/master/api/python/symbol/contrib.html
+++ b/versions/master/api/python/symbol/contrib.html
@@ -53,7 +53,7 @@
 <link href="sparse.html" rel="prev" title="Sparse Symbol API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -716,7 +716,7 @@ SparseEmbedding(x, y, 4, 5) = [[[  5.,   6.,   7.,   8.,   9.],
                                [ 10.,  11.,  12.,  13.,  14.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L253</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L254</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -741,6 +741,248 @@ SparseEmbedding(x, y, 4, 5) = [[[  5.,   6.,   7.,   8.,   9.],
 </table>
 </dd></dl>
 <dl class="function">
+<dt id="mxnet.symbol.contrib.bipartite_matching">
+<code class="descclassname">mxnet.symbol.contrib.</code><code class="descname">bipartite_matching</code><span class="sig-paren">(</span><em>data=None</em>, <em>is_ascend=_Null</em>, <em>threshold=_Null</em>, <em>topk=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.symbol.contrib.bipartite_matching" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="docutils">
+<dt>Compute bipartite matching.</dt>
+<dd><p class="first">The matching is performed on a score matrix with shape [B, N, M]
+- B: batch size
+- N: number of rows to match
+- M: number of columns used as reference to be matched against.</p>
+<p>Returns:
+x : matched column indices; -1 indicates a non-matched element in a row.
+y : matched row indices.</p>
+<p>Note:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>Zero gradients are back-propagated in this op for now.
+</pre></div>
+</div>
+<p>Example:</p>
+<div class="last highlight-python"><div class="highlight"><pre><span></span>s = [[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]]
+x, y = bipartite_matching(s, threshold=1e-12, is_ascend=False)
+x = [1, -1, 0]
+y = [2, 0]
+</pre></div>
+</div>
+</dd>
+</dl>
+<p>Defined in src/operator/contrib/bounding_box.cc:L169</p>
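+<p>Reading the example above: the result is consistent with greedily pairing the highest remaining
+score first, so row 0 takes column 1 (score 0.6), row 2 then takes column 0 (score 0.3), and row 1
+is left unmatched, giving x = [1, -1, 0]; y records the matched row for each column, hence y = [2, 0].</p>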
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>data</strong> (<a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – The input</li>
+<li><strong>is_ascend</strong> (<em>boolean, optional, default=0</em>) – Use ascend order for scores instead of descending. Please set threshold accordingly.</li>
+<li><strong>threshold</strong> (<em>float, required</em>) – Ignore matches with score < thresh if is_ascend=false, or with score > thresh if is_ascend=true.</li>
+<li><strong>topk</strong> (<em>int, optional, default='-1'</em>) – Limit the number of matches to topk, set -1 for no limit</li>
+<li><strong>name</strong> (<em>string, optional.</em>) – Name of the resulting symbol.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">The result symbol.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
+<dt id="mxnet.symbol.contrib.box_iou">
+<code class="descclassname">mxnet.symbol.contrib.</code><code class="descname">box_iou</code><span class="sig-paren">(</span><em>lhs=None</em>, <em>rhs=None</em>, <em>format=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.symbol.contrib.box_iou" title="Permalink to this definition">¶</a></dt>
+<dd><dl class="docutils">
+<dt>Bounding box overlap of two arrays.</dt>
+<dd><p class="first">The overlap is defined as Intersection-over-Union, a.k.a. IoU.
+- lhs: (a_1, a_2, ..., a_n, 4) array
+- rhs: (b_1, b_2, ..., b_n, 4) array
+- output: (a_1, a_2, ..., a_n, b_1, b_2, ..., b_n) array</p>
+<p>Note:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>Zero gradients are back-propagated in this op for now.
+</pre></div>
+</div>
+<p>Example:</p>
+<div class="last highlight-python"><div class="highlight"><pre><span></span><span class="n">x</span> <span class="o">=</span> <span class="p">[[</span><span class="mf">0.5</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">,</span> <span class="mf">1.0</span><span class="p">,</span> <span class="mf">1.0</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.0</span><span class="p">,</span> <span class="mf">0.0</span><span class="p">,</span> <spa [...]
+<span class="n">y</span> <span class="o">=</span> <span class="p">[</span><span class="mf">0.25</span><span class="p">,</span> <span class="mf">0.25</span><span class="p">,</span> <span class="mf">0.75</span><span class="p">,</span> <span class="mf">0.75</span><span class="p">]</span>
+<span class="n">box_iou</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">,</span> <span class="n">format</span><span class="o">=</span><span class="s1">'corner'</span><span class="p">)</span> <span class="o">=</span> <span class="p">[[</span><span class="mf">0.1428</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.1428</span><span class="p">]]</span>
+</pre></div>
+</div>
+</dd>
+</dl>
+<p>Defined in src/operator/contrib/bounding_box.cc:L123</p>
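+<p>Checking the first entry of the example above: the intersection of [0.5, 0.5, 1.0, 1.0] with
+y = [0.25, 0.25, 0.75, 0.75] is the 0.25 × 0.25 square with area 0.0625, the union has area
+0.25 + 0.25 - 0.0625 = 0.4375, and 0.0625 / 0.4375 ≈ 0.1428.</p>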
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>lhs</strong> (<a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – The first input</li>
+<li><strong>rhs</strong> (<a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – The second input</li>
+<li><strong>format</strong> (<em>{'center', 'corner'},optional, default='corner'</em>) – The box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>name</strong> (<em>string, optional.</em>) – Name of the resulting symbol.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">The result symbol.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
+<dt id="mxnet.symbol.contrib.box_nms">
+<code class="descclassname">mxnet.symbol.contrib.</code><code class="descname">box_nms</code><span class="sig-paren">(</span><em>data=None</em>, <em>overlap_thresh=_Null</em>, <em>topk=_Null</em>, <em>coord_start=_Null</em>, <em>score_index=_Null</em>, <em>id_index=_Null</em>, <em>force_suppress=_Null</em>, <em>in_format=_Null</em>, <em>out_format=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink"  [...]
+<dd><p>Apply non-maximum suppression to input.</p>
+<p>The output will be sorted in descending order according to <cite>score</cite>. Boxes with
+overlaps larger than <cite>overlap_thresh</cite> and smaller scores will be removed and
+filled with -1; the corresponding positions will be recorded for backward propagation.</p>
+<p>During back-propagation, the gradient will be copied to the original
+position according to the input index. For positions that have been suppressed,
+the in_grad will be assigned 0.
+In summary, gradients stick to their boxes and will either be moved or discarded
+according to their original index in the input.</p>
+<p>Input requirements:
+1. The input tensor must have at least 2 dimensions, (n, k); any higher dims will be regarded
+as batch, e.g. (a, b, c, d, n, k) == (a*b*c*d, n, k).
+2. n is the number of boxes in each batch.
+3. k is the width of each box item.</p>
+<p>By default, a box is [id, score, xmin, ymin, xmax, ymax, ...];
+additional elements are allowed.
+- <cite>id_index</cite>: optional, use -1 to ignore; useful if <cite>force_suppress=False</cite>, which means
+we will skip suppressing highly overlapped boxes if one is an <cite>apple</cite> while the other is a <cite>car</cite>.
+- <cite>coord_start</cite>: required, default=2, the starting index of the 4 coordinates.
+Two formats are supported:</p>
+<blockquote>
+<div><cite>corner</cite>: [xmin, ymin, xmax, ymax]
+<cite>center</cite>: [x, y, width, height]</div></blockquote>
+<ul class="simple">
+<li><cite>score_index</cite>: required, default=1, box score/confidence.</li>
+</ul>
+<p>When two boxes overlap with IoU > <cite>overlap_thresh</cite>, the one with the smaller score will be suppressed.
+- <cite>in_format</cite> and <cite>out_format</cite>: default=’corner’, specify in/out box formats.</p>
+<p>Examples:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>x = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
+     [0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
+box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1, id_index=0,
+    force_suppress=True, in_format='corner', out_format='corner') =
+    [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
+     [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
+out_grad = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2, 0.2, 0.2],
+            [0.3, 0.3, 0.3, 0.3, 0.3, 0.3], [0.4, 0.4, 0.4, 0.4, 0.4, 0.4]]
+# exe.backward
+in_grad = [[0.2, 0.2, 0.2, 0.2, 0.2, 0.2], [0, 0, 0, 0, 0, 0],
+           [0, 0, 0, 0, 0, 0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]
+</pre></div>
+</div>
+<p>Defined in src/operator/contrib/bounding_box.cc:L82</p>
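+<p>A minimal imperative sketch of the same call (assuming the NDArray counterpart
+<code class="docutils literal"><span class="pre">mx.nd.contrib.box_nms</span></code> is available, as in MXNet 1.x; the
+values simply mirror the example above and are illustrative only):</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
+
+# Four boxes in [id, score, xmin, ymin, xmax, ymax] format.
+x = mx.nd.array([[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
+                 [0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]])
+# Suppress overlapping boxes regardless of class id (force_suppress=True).
+y = mx.nd.contrib.box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1,
+                          id_index=0, force_suppress=True, in_format='corner',
+                          out_format='corner')
+print(y.asnumpy())  # suppressed rows are filled with -1
+</pre></div>
+</div>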
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>data</strong> (<a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – The input</li>
+<li><strong>overlap_thresh</strong> (<em>float, optional, default=0.5</em>) – Overlap (IoU) threshold above which the box with the smaller score is suppressed.</li>
+<li><strong>topk</strong> (<em>int, optional, default='-1'</em>) – Apply nms only to the topk boxes with the highest scores; -1 means no restriction.</li>
+<li><strong>coord_start</strong> (<em>int, optional, default='2'</em>) – Start index of the consecutive 4 coordinates.</li>
+<li><strong>score_index</strong> (<em>int, optional, default='1'</em>) – Index of the scores/confidence of boxes.</li>
+<li><strong>id_index</strong> (<em>int, optional, default='-1'</em>) – Optional, index of the class categories, -1 to disable.</li>
+<li><strong>force_suppress</strong> (<em>boolean, optional, default=0</em>) – Optional; if set to false and id_index is provided, nms will only be applied to boxes belonging to the same category.</li>
+<li><strong>in_format</strong> (<em>{'center', 'corner'}, optional, default='corner'</em>) – The input box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out_format</strong> (<em>{'center', 'corner'}, optional, default='corner'</em>) – The output box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>name</strong> (<em>string, optional.</em>) – Name of the resulting symbol.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">The result symbol.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
+<dt id="mxnet.symbol.contrib.box_non_maximum_suppression">
+<code class="descclassname">mxnet.symbol.contrib.</code><code class="descname">box_non_maximum_suppression</code><span class="sig-paren">(</span><em>data=None</em>, <em>overlap_thresh=_Null</em>, <em>topk=_Null</em>, <em>coord_start=_Null</em>, <em>score_index=_Null</em>, <em>id_index=_Null</em>, <em>force_suppress=_Null</em>, <em>in_format=_Null</em>, <em>out_format=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a [...]
+<dd><p>Apply non-maximum suppression to input.</p>
+<p>The output will be sorted in descending order according to <cite>score</cite>. Boxes with
+overlaps larger than <cite>overlap_thresh</cite> and smaller scores will be removed and
+filled with -1; the corresponding positions will be recorded for backward propagation.</p>
+<p>During back-propagation, the gradient will be copied to the original
+position according to the input index. For positions that have been suppressed,
+the in_grad will be assigned 0.
+In summary, gradients stick to their boxes and will either be moved or discarded
+according to their original index in the input.</p>
+<p>Input requirements:
+1. The input tensor must have at least 2 dimensions, (n, k); any higher dims will be regarded
+as batch, e.g. (a, b, c, d, n, k) == (a*b*c*d, n, k)
+2. n is the number of boxes in each batch
+3. k is the width of each box item.</p>
+<p>By default, a box is [id, score, xmin, ymin, xmax, ymax, ...],
+additional elements are allowed.
+- <cite>id_index</cite>: optional, use -1 to ignore, useful when <cite>force_suppress=False</cite>, in which case
+highly overlapping boxes are not suppressed if one is <cite>apple</cite> while the other is <cite>car</cite>.
+- <cite>coord_start</cite>: required, default=2, the starting index of the 4 coordinates.
+Two formats are supported:</p>
+<blockquote>
+<div><cite>corner</cite>: [xmin, ymin, xmax, ymax]
+<cite>center</cite>: [x, y, width, height]</div></blockquote>
+<ul class="simple">
+<li><cite>score_index</cite>: required, default=1, box score/confidence.</li>
+</ul>
+<p>When two boxes overlap with IoU > <cite>overlap_thresh</cite>, the one with the smaller score will be suppressed.
+- <cite>in_format</cite> and <cite>out_format</cite>: default=’corner’, specify in/out box formats.</p>
+<p>Examples:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>x = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
+     [0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
+box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1, id_index=0,
+    force_suppress=True, in_format='corner', out_format='corner') =
+    [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
+     [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
+out_grad = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2, 0.2, 0.2],
+            [0.3, 0.3, 0.3, 0.3, 0.3, 0.3], [0.4, 0.4, 0.4, 0.4, 0.4, 0.4]]
+# exe.backward
+in_grad = [[0.2, 0.2, 0.2, 0.2, 0.2, 0.2], [0, 0, 0, 0, 0, 0],
+           [0, 0, 0, 0, 0, 0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]
+</pre></div>
+</div>
+<p>Defined in src/operator/contrib/bounding_box.cc:L82</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>data</strong> (<a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – The input</li>
+<li><strong>overlap_thresh</strong> (<em>float, optional, default=0.5</em>) – Overlap (IoU) threshold above which the box with the smaller score is suppressed.</li>
+<li><strong>topk</strong> (<em>int, optional, default='-1'</em>) – Apply nms only to the topk boxes with the highest scores; -1 means no restriction.</li>
+<li><strong>coord_start</strong> (<em>int, optional, default='2'</em>) – Start index of the consecutive 4 coordinates.</li>
+<li><strong>score_index</strong> (<em>int, optional, default='1'</em>) – Index of the scores/confidence of boxes.</li>
+<li><strong>id_index</strong> (<em>int, optional, default='-1'</em>) – Optional, index of the class categories, -1 to disable.</li>
+<li><strong>force_suppress</strong> (<em>boolean, optional, default=0</em>) – Optional; if set to false and id_index is provided, nms will only be applied to boxes belonging to the same category.</li>
+<li><strong>in_format</strong> (<em>{'center', 'corner'}, optional, default='corner'</em>) – The input box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>out_format</strong> (<em>{'center', 'corner'}, optional, default='corner'</em>) – The output box encoding type.
+“corner” means boxes are encoded as [xmin, ymin, xmax, ymax], “center” means boxes are encoded as [x, y, width, height].</li>
+<li><strong>name</strong> (<em>string, optional.</em>) – Name of the resulting symbol.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">The result symbol.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
 <dt id="mxnet.symbol.contrib.count_sketch">
 <code class="descclassname">mxnet.symbol.contrib.</code><code class="descname">count_sketch</code><span class="sig-paren">(</span><em>data=None</em>, <em>h=None</em>, <em>s=None</em>, <em>out_dim=_Null</em>, <em>processing_batch_size=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.symbol.contrib.count_sketch" title="Permalink to this definition">¶</a></dt>
 <dd><p>Apply CountSketch to input: map a d-dimension data to k-dimension data”</p>
diff --git a/versions/master/api/python/symbol/linalg.html b/versions/master/api/python/symbol/linalg.html
index 072d68e..67cab22 100644
--- a/versions/master/api/python/symbol/linalg.html
+++ b/versions/master/api/python/symbol/linalg.html
@@ -53,7 +53,7 @@
 <link href="random.html" rel="prev" title="Random Distribution Generator Symbol API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/symbol/random.html b/versions/master/api/python/symbol/random.html
index d4fb0f7..33e2026 100644
--- a/versions/master/api/python/symbol/random.html
+++ b/versions/master/api/python/symbol/random.html
@@ -53,7 +53,7 @@
 <link href="symbol.html" rel="prev" title="Symbol API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/symbol/rnn.html b/versions/master/api/python/symbol/rnn.html
index 2af9842..8872415 100644
--- a/versions/master/api/python/symbol/rnn.html
+++ b/versions/master/api/python/symbol/rnn.html
@@ -53,7 +53,7 @@
 <link href="contrib.html" rel="prev" title="Contrib Symbol API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/python/symbol/sparse.html b/versions/master/api/python/symbol/sparse.html
index 05cd73a..74b41cd 100644
--- a/versions/master/api/python/symbol/sparse.html
+++ b/versions/master/api/python/symbol/sparse.html
@@ -53,7 +53,7 @@
 <link href="linalg.html" rel="prev" title="Linear Algebra Symbol API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -642,7 +642,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, m a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">m</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">/</span> <span class="p">(</span><span class="n">sqrt</span><span class="p">(</span><span class="n">v</span><span class="p">[</span><span class="n">row</span><sp [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L175</p>
+<p>Defined in src/operator/optimizer_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -963,7 +963,7 @@ the result is compact, which means:</p>
 <li>ceil(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L463</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L464</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1010,7 +1010,7 @@ Clipping <code class="docutils literal"><span class="pre">x</span></code> betwee
 <li>clip(csr, a_min > 0, a_max > 0) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L424</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L486</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1151,9 +1151,10 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>dot(csr, default) = default</li>
 <li>dot(csr.T, default) = row_sparse</li>
 <li>dot(csr, row_sparse) = default</li>
+<li>dot(default, csr) = csr</li>
 <li>otherwise, <code class="docutils literal"><span class="pre">dot</span></code> generates output with default storage</li>
 </ul>
-<p>Defined in src/operator/tensor/dot.cc:L61</p>
+<p>Defined in src/operator/tensor/dot.cc:L62</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1311,7 +1312,7 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">exp</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L637</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L642</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1343,7 +1344,7 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>expm1(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L716</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L721</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1378,7 +1379,7 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>fix(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L517</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L521</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1414,7 +1415,7 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>floor(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L481</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L483</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1456,7 +1457,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="p">(</span><span class="n">sign</span><span class="p">(</span><span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span class="o">*</span> <span class="n">lamda1</span> <span class="o">-</span> <span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span cla [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L308</p>
+<p>Defined in src/operator/optimizer_op.cc:L341</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1539,7 +1540,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
 <dd><p>Returns element-wise Natural logarithmic value of the input.</p>
 <p>The natural logarithm is logarithm in base <em>e</em>, so that <code class="docutils literal"><span class="pre">log(exp(x))</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L649</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L654</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1565,7 +1566,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
 <dd><p>Returns element-wise Base-10 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">10**log10(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log10</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L661</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L666</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1598,7 +1599,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
 <li>log1p(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L698</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L703</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1624,7 +1625,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
 <dd><p>Returns element-wise Base-2 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">2**log2(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log2</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L673</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L678</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1950,7 +1951,7 @@ and save them in the output sparse matrix.</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">rsqrt</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L580</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L585</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -1993,7 +1994,7 @@ only the row slices whose indices appear in grad.indices are updated (for both w
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="n">v</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L93</p>
+<p>Defined in src/operator/optimizer_op.cc:L94</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2034,7 +2035,7 @@ only the row slices whose indices appear in grad.indices are updated:</p>
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">gradient</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L53</p>
+<p>Defined in src/operator/optimizer_op.cc:L54</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2235,7 +2236,7 @@ a dense tensor.</p>
                                                           <span class="p">[</span><span class="mf">1.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L297</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L355</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2275,7 +2276,7 @@ a dense tensor.</p>
 <li>sqrt(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L560</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L565</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2313,7 +2314,7 @@ a dense tensor.</p>
 <li>square(csr) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L537</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L542</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -2409,7 +2410,7 @@ data = [[1,2,0],
 csr = cast_storage(data, 'csr')
 
 sum(csr, axis=0)
-[ 8.  2.  2.]
+[ 8.  3.  1.]
 
 sum(csr, axis=1)
 [ 3.  4.  5.]
@@ -2531,7 +2532,7 @@ zero than x is. In short, the fractional part of the signed number x is discarde
 <li>trunc(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L500</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L503</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
diff --git a/versions/master/api/python/symbol/symbol.html b/versions/master/api/python/symbol/symbol.html
index f186233..dd5b926 100644
--- a/versions/master/api/python/symbol/symbol.html
+++ b/versions/master/api/python/symbol/symbol.html
@@ -53,7 +53,7 @@
 <link href="../ndarray/contrib.html" rel="prev" title="Contrib NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
@@ -1459,7 +1459,7 @@ explicitly.</li>
 <td>Applies correlation to inputs.</td>
 </tr>
 <tr class="row-even"><td><a class="reference internal" href="#mxnet.symbol.Deconvolution" title="mxnet.symbol.Deconvolution"><code class="xref py py-obj docutils literal"><span class="pre">Deconvolution</span></code></a></td>
-<td>Computes 2D transposed convolution (aka fractionally strided convolution) of the input tensor.</td>
+<td>Computes 1D or 2D transposed convolution (aka fractionally strided convolution) of the input tensor.</td>
 </tr>
 <tr class="row-odd"><td><a class="reference internal" href="#mxnet.symbol.RNN" title="mxnet.symbol.RNN"><code class="xref py py-obj docutils literal"><span class="pre">RNN</span></code></a></td>
 <td>Applies a recurrent layer to input.</td>
@@ -1993,6 +1993,29 @@ in the group.</td>
 </table>
 </dd></dl>
 <dl class="method">
+<dt id="mxnet.symbol.Symbol.__len__">
+<code class="descname">__len__</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/mxnet/symbol/symbol.html#Symbol.__len__"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.symbol.Symbol.__len__" title="Permalink to this definition">¶</a></dt>
+<dd><p>Get number of outputs for the symbol.</p>
+<p class="rubric">Example</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span><span class="gp">>>> </span><span class="n">a</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">sym</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="s1">'a'</span><span class="p">)</span>
+<span class="gp">>>> </span><span class="n">b</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">sym</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="s1">'b'</span><span class="p">)</span>
+<span class="gp">>>> </span><span class="n">c</span> <span class="o">=</span> <span class="n">a</span> <span class="o">+</span> <span class="n">b</span>
+<span class="gp">>>> </span><span class="nb">len</span><span class="p">(</span><span class="n">c</span><span class="p">)</span>
+</pre></div>
+</div>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body"><strong>len(self)</strong> –
+Number of outputs</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">Number of outputs</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="method">
 <dt id="mxnet.symbol.Symbol.list_auxiliary_states">
 <code class="descname">list_auxiliary_states</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/mxnet/symbol/symbol.html#Symbol.list_auxiliary_states"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mxnet.symbol.Symbol.list_auxiliary_states" title="Permalink to this definition">¶</a></dt>
 <dd><p>Lists all the auxiliary states in the symbol.</p>
@@ -3563,10 +3586,10 @@ the performance.</li>
 <li><strong>data</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Input data to the ConvolutionOp.</li>
 <li><strong>weight</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Weight matrix.</li>
 <li><strong>bias</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Bias parameter.</li>
-<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Convolution kernel size: (h, w) or (d, h, w)</li>
-<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution stride: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution dilate: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – Zero pad for convolution: (h, w) or (d, h, w). Defaults to no padding.</li>
+<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Convolution kernel size: (w,), (h, w) or (d, h, w)</li>
+<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution stride: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Convolution dilate: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – Zero pad for convolution: (w,), (h, w) or (d, h, w). Defaults to no padding.</li>
 <li><strong>num_filter</strong> (<em>int (non-negative), required</em>) – Convolution filter(channel) number</li>
 <li><strong>num_group</strong> (<em>int (non-negative), optional, default=1</em>) – Number of group partitions.</li>
 <li><strong>workspace</strong> (<em>long (non-negative), optional, default=1024</em>) – Maximum temporary workspace allowed for convolution (MB).</li>
@@ -3747,7 +3770,7 @@ Please check the tutorial here: <a class="reference external" href="https://mxne
 <dl class="function">
 <dt id="mxnet.symbol.Deconvolution">
 <code class="descclassname">mxnet.symbol.</code><code class="descname">Deconvolution</code><span class="sig-paren">(</span><em>data=None</em>, <em>weight=None</em>, <em>bias=None</em>, <em>kernel=_Null</em>, <em>stride=_Null</em>, <em>dilate=_Null</em>, <em>pad=_Null</em>, <em>adj=_Null</em>, <em>target_shape=_Null</em>, <em>num_filter=_Null</em>, <em>num_group=_Null</em>, <em>workspace=_Null</em>, <em>no_bias=_Null</em>, <em>cudnn_tune=_Null</em>, <em>cudnn_off=_Null</em>, <em>layout=_N [...]
-<dd><p>Computes 2D transposed convolution (aka fractionally strided convolution) of the input tensor. This operation can be seen as the gradient of Convolution operation with respect to its input. Convolution usually reduces the size of the input. Transposed convolution works the other way, going from a smaller input to a larger output while preserving the connectivity pattern.</p>
+<dd><p>Computes 1D or 2D transposed convolution (aka fractionally strided convolution) of the input tensor. This operation can be seen as the gradient of Convolution operation with respect to its input. Convolution usually reduces the size of the input. Transposed convolution works the other way, going from a smaller input to a larger output while preserving the connectivity pattern.</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -3756,12 +3779,12 @@ Please check the tutorial here: <a class="reference external" href="https://mxne
 <li><strong>data</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Input tensor to the deconvolution operation.</li>
 <li><strong>weight</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Weights representing the kernel.</li>
 <li><strong>bias</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Bias added to the result after the deconvolution operation.</li>
-<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Deconvolution kernel size: (h, w) or (d, h, w). This is same as the kernel size used for the corresponding convolution</li>
-<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – The stride used for the corresponding convolution: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Dilation factor for each dimension of the input: (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
-<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – The amount of implicit zero padding added during convolution for each dimension of the input: (h, w) or (d, h, w). <code class="docutils literal"><span class="pre">(kernel-1)/2</span></code> is usually a good choice. If <cite>target_shape</cite> is set, <cite>pad</cite> will be ignored and a padding that will generate the target shape will be used. Defaults to no padding.</li>
-<li><strong>adj</strong> (<em>Shape(tuple), optional, default=[]</em>) – Adjustment for output shape: (h, w) or (d, h, w). If <cite>target_shape</cite> is set, <cite>adj</cite> will be ignored and computed accordingly.</li>
-<li><strong>target_shape</strong> (<em>Shape(tuple), optional, default=[]</em>) – Shape of the output tensor: (h, w) or (d, h, w).</li>
+<li><strong>kernel</strong> (<em>Shape(tuple), required</em>) – Deconvolution kernel size: (w,), (h, w) or (d, h, w). This is same as the kernel size used for the corresponding convolution</li>
+<li><strong>stride</strong> (<em>Shape(tuple), optional, default=[]</em>) – The stride used for the corresponding convolution: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>dilate</strong> (<em>Shape(tuple), optional, default=[]</em>) – Dilation factor for each dimension of the input: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.</li>
+<li><strong>pad</strong> (<em>Shape(tuple), optional, default=[]</em>) – The amount of implicit zero padding added during convolution for each dimension of the input: (w,), (h, w) or (d, h, w). <code class="docutils literal"><span class="pre">(kernel-1)/2</span></code> is usually a good choice. If <cite>target_shape</cite> is set, <cite>pad</cite> will be ignored and a padding that will generate the target shape will be used. Defaults to no padding.</li>
+<li><strong>adj</strong> (<em>Shape(tuple), optional, default=[]</em>) – Adjustment for output shape: (w,), (h, w) or (d, h, w). If <cite>target_shape</cite> is set, <cite>adj</cite> will be ignored and computed accordingly.</li>
+<li><strong>target_shape</strong> (<em>Shape(tuple), optional, default=[]</em>) – Shape of the output tensor: (w,), (h, w) or (d, h, w).</li>
 <li><strong>num_filter</strong> (<em>int (non-negative), required</em>) – Number of output filters.</li>
 <li><strong>num_group</strong> (<em>int (non-negative), optional, default=1</em>) – Number of groups partition.</li>
 <li><strong>workspace</strong> (<em>long (non-negative), optional, default=512</em>) – Maximum temporal workspace allowed for deconvolution (MB).</li>
@@ -3927,7 +3950,7 @@ Embedding(x, y, 4, 5) = [[[  5.,   6.,   7.,   8.,   9.],
                           [ 10.,  11.,  12.,  13.,  14.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L184</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L185</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -4000,7 +4023,7 @@ the input array into an output array of shape <code class="docutils literal"><sp
    <span class="p">[</span> <span class="mf">1.</span><span class="p">,</span>  <span class="mf">2.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">9.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L150</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -4943,7 +4966,7 @@ At most one dimension of shape can be -1.</p>
 </pre></div>
 </div>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L106</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L164</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -5679,7 +5702,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, m a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">m</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">/</span> <span class="p">(</span><span class="n">sqrt</span><span class="p">(</span><span class="n">v</span><span class="p">[</span><span class="n">row</span><sp [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L175</p>
+<p>Defined in src/operator/optimizer_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -6103,7 +6126,7 @@ which is computed by:</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">batch_dot</span><span class="p">(</span><span class="n">x</span><span class="p">,</span><span class="n">y</span><span class="p">)[</span><span class="n">i</span><span class="p">,:,:]</span> <span class="o">=</span> <span class="n">dot</span><span class="p">(</span><span class="n">x</span><span class="p">[</span><span class="n">i</span><span class="p">,:,:],</span> <span class="n">y</span><span class="p [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/dot.cc:L109</p>
+<p>Defined in src/operator/tensor/dot.cc:L110</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -6148,7 +6171,7 @@ an output array of shape <code class="docutils literal"><span class="pre">(i0,)<
 batch_take(x, [0,1,0]) = [ 1.  4.  5.]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L381</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L382</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7031,7 +7054,7 @@ the result is compact, which means:</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">cbrt</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">8</span><span class="p">,</span> <span class="o">-</span><span class="mi">125</span><span class="p">])</span> <span class="o">=</span> <span class="p">[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="o">-</span><span class="mi">5< [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L597</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L602</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7067,7 +7090,7 @@ the result is compact, which means:</p>
 <li>ceil(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L463</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L464</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7138,7 +7161,7 @@ Clipping <code class="docutils literal"><span class="pre">x</span></code> betwee
 <li>clip(csr, a_min > 0, a_max > 0) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L424</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L486</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7318,7 +7341,7 @@ a dense tensor.</p>
                                                           <span class="p">[</span><span class="mf">1.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L297</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L355</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7405,9 +7428,10 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <li>dot(csr, default) = default</li>
 <li>dot(csr.T, default) = row_sparse</li>
 <li>dot(csr, row_sparse) = default</li>
+<li>dot(default, csr) = csr</li>
 <li>otherwise, <code class="docutils literal"><span class="pre">dot</span></code> generates output with default storage</li>
 </ul>
-<p>Defined in src/operator/tensor/dot.cc:L61</p>
+<p>Defined in src/operator/tensor/dot.cc:L62</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7565,7 +7589,7 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">exp</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L637</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L642</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7591,7 +7615,7 @@ result array will have shape <cite>(n,m,r,s)</cite>. It is computed by:</p>
 <dd><p>Inserts a new axis of size 1 into the array shape</p>
 <p>For example, given <code class="docutils literal"><span class="pre">x</span></code> with shape <code class="docutils literal"><span class="pre">(2,3,4)</span></code>, then <code class="docutils literal"><span class="pre">expand_dims(x,</span> <span class="pre">axis=1)</span></code>
 will return a new array with shape <code class="docutils literal"><span class="pre">(2,1,3,4)</span></code>.</p>
-<p>Defined in src/operator/tensor/matrix_op.cc:L231</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L289</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7624,7 +7648,7 @@ will return a new array with shape <code class="docutils literal"><span class="p
 <li>expm1(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L716</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L721</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7684,7 +7708,7 @@ will return a new array with shape <code class="docutils literal"><span class="p
 <li>fix(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L517</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L521</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7729,7 +7753,7 @@ the input array into an output array of shape <code class="docutils literal"><sp
    <span class="p">[</span> <span class="mf">1.</span><span class="p">,</span>  <span class="mf">2.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">9.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L150</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L208</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7765,7 +7789,7 @@ the input array into an output array of shape <code class="docutils literal"><sp
                       <span class="p">[</span> <span class="mf">9.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L600</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L662</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7802,7 +7826,7 @@ the input array into an output array of shape <code class="docutils literal"><sp
 <li>floor(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L481</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L483</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7823,6 +7847,50 @@ the input array into an output array of shape <code class="docutils literal"><sp
 </table>
 </dd></dl>
 <dl class="function">
+<dt id="mxnet.symbol.ftml_update">
+<code class="descclassname">mxnet.symbol.</code><code class="descname">ftml_update</code><span class="sig-paren">(</span><em>weight=None</em>, <em>grad=None</em>, <em>d=None</em>, <em>v=None</em>, <em>z=None</em>, <em>lr=_Null</em>, <em>beta1=_Null</em>, <em>beta2=_Null</em>, <em>epsilon=_Null</em>, <em>wd=_Null</em>, <em>rescale_grad=_Null</em>, <em>clip_gradient=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a cl [...]
+<dd><p>The FTML optimizer described in
+<em>FTML - Follow the Moving Leader in Deep Learning</em>,
+available at <a class="reference external" href="http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf">http://proceedings.mlr.press/v70/zheng17a/zheng17a.pdf</a>.</p>
+<div class="math">
+\[\begin{split}g_t = \nabla J(W_{t-1})\\
+v_t = \beta_2 v_{t-1} + (1 - \beta_2) g_t^2\\
+d_t = \frac{ (1 - \beta_1^t) }{ \eta_t } (\sqrt{ \frac{ v_t }{ 1 - \beta_2^t } } + \epsilon)\\
+\sigma_t = d_t - \beta_1 d_{t-1}\\
+z_t = \beta_1 z_{ t-1 } + (1 - \beta_1^t) g_t - \sigma_t W_{t-1}\\
+W_t = - \frac{ z_t }{ d_t }\end{split}\]</div>
+<p>Defined in src/operator/optimizer_op.cc:L161</p>
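+<p>For reference, a small NumPy sketch of a single step following the equations above
+(illustrative only; the function name and the explicit <code class="docutils literal"><span class="pre">t</span></code> counter are
+made up for the sketch, and the actual operator updates its state arrays in place):</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span>import numpy as np
+
+def ftml_step(w, d_prev, v_prev, z_prev, g, t, lr, beta1=0.9, beta2=0.999, eps=1e-8):
+    # Second-moment estimate, bias-corrected denominator, and shifted state,
+    # written exactly as in the update equations above.
+    v = beta2 * v_prev + (1.0 - beta2) * g ** 2
+    d = (1.0 - beta1 ** t) / lr * (np.sqrt(v / (1.0 - beta2 ** t)) + eps)
+    sigma = d - beta1 * d_prev
+    z = beta1 * z_prev + (1.0 - beta1 ** t) * g - sigma * w
+    return -z / d, d, v, z   # new weight and updated states
+</pre></div>
+</div>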
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>weight</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Weight</li>
+<li><strong>grad</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Gradient</li>
+<li><strong>d</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Internal state <code class="docutils literal"><span class="pre">d_t</span></code></li>
+<li><strong>v</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Internal state <code class="docutils literal"><span class="pre">v_t</span></code></li>
+<li><strong>z</strong> (<a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol"><em>Symbol</em></a>) – Internal state <code class="docutils literal"><span class="pre">z_t</span></code></li>
+<li><strong>lr</strong> (<em>float, required</em>) – Learning rate</li>
+<li><strong>beta1</strong> (<em>float, optional, default=0.9</em>) – The decay rate for the 1st moment estimates.</li>
+<li><strong>beta2</strong> (<em>float, optional, default=0.999</em>) – The decay rate for the 2nd moment estimates.</li>
+<li><strong>epsilon</strong> (<em>float, optional, default=1e-08</em>) – A small constant for numerical stability.</li>
+<li><strong>wd</strong> (<em>float, optional, default=0</em>) – Weight decay augments the objective function with a regularization term that penalizes large weights. The penalty scales with the square of the magnitude of each weight.</li>
+<li><strong>rescale_grad</strong> (<em>float, optional, default=1</em>) – Rescale gradient to grad = rescale_grad*grad.</li>
+<li><strong>clip_gradient</strong> (<em>float, optional, default=-1</em>) – Clip gradient to the range of [-clip_gradient, clip_gradient] If clip_gradient <= 0, gradient clipping is turned off. grad = max(min(grad, clip_gradient), -clip_gradient).</li>
+<li><strong>name</strong> (<em>string, optional.</em>) – Name of the resulting symbol.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">The result symbol.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
 <dt id="mxnet.symbol.ftrl_update">
 <code class="descclassname">mxnet.symbol.</code><code class="descname">ftrl_update</code><span class="sig-paren">(</span><em>weight=None</em>, <em>grad=None</em>, <em>z=None</em>, <em>n=None</em>, <em>lr=_Null</em>, <em>lamda1=_Null</em>, <em>beta=_Null</em>, <em>wd=_Null</em>, <em>rescale_grad=_Null</em>, <em>clip_gradient=_Null</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.symbol.ftrl [...]
 <dd><p>Update function for Ftrl optimizer.
@@ -7844,7 +7912,7 @@ only the row slices whose indices appear in grad.indices are updated (for w, z a
     <span class="n">w</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="p">(</span><span class="n">sign</span><span class="p">(</span><span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span class="o">*</span> <span class="n">lamda1</span> <span class="o">-</span> <span class="n">z</span><span class="p">[</span><span class="n">row</span><span class="p">])</span> <span cla [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L308</p>
+<p>Defined in src/operator/optimizer_op.cc:L341</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -7988,6 +8056,56 @@ where <cite>M <= N</cite>. If <cite>M == N</cite>, output shape will simply be <
 </table>
 </dd></dl>
 <dl class="function">
+<dt id="mxnet.symbol.khatri_rao">
+<code class="descclassname">mxnet.symbol.</code><code class="descname">khatri_rao</code><span class="sig-paren">(</span><em>*args</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.symbol.khatri_rao" title="Permalink to this definition">¶</a></dt>
+<dd><p>Computes the Khatri-Rao product of the input matrices.</p>
+<p>Given a collection of <span class="math">\(n\)</span> input matrices,</p>
+<div class="math">
+\[A_1 \in \mathbb{R}^{M_1 \times N}, \ldots, A_n \in \mathbb{R}^{M_n \times N},\]</div>
+<p>the (column-wise) Khatri-Rao product is defined as the matrix,</p>
+<div class="math">
+\[X = A_1 \otimes \cdots \otimes A_n \in \mathbb{R}^{(M_1 \cdots M_n) \times N},\]</div>
+<p>where the <span class="math">\(k\)</span>-th column is equal to the column-wise outer product
+<span class="math">\({A_1}_k \otimes \cdots \otimes {A_n}_k\)</span>, where <span class="math">\({A_i}_k\)</span> is the <span class="math">\(k\)</span>-th
+column of the <span class="math">\(i\)</span>-th matrix.</p>
+<p>Example:</p>
+<div class="highlight-python"><div class="highlight"><pre><span></span><span class="gp">>>> </span><span class="n">A</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">],</span>
+<span class="gp">>>> </span>                 <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="o">-</span><span class="mi">3</span><span class="p">]])</span>
+<span class="gp">>>> </span><span class="n">B</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">4</span><span class="p">],</span>
+<span class="gp">>>> </span>                 <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="mi">5</span><span class="p">],</span>
+<span class="gp">>>> </span>                 <span class="p">[</span><span class="mi">3</span><span class="p">,</span> <span class="mi">6</span><span class="p">]])</span>
+<span class="gp">>>> </span><span class="n">C</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">khatri_rao</span><span class="p">(</span><span class="n">A</span><span class="p">,</span> <span class="n">B</span><span class="p">)</span>
+<span class="gp">>>> </span><span class="k">print</span><span class="p">(</span><span class="n">C</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">())</span>
+<span class="go">[[  1.  -4.]</span>
+<span class="go"> [  2.  -5.]</span>
+<span class="go"> [  3.  -6.]</span>
+<span class="go"> [  2. -12.]</span>
+<span class="go"> [  4. -15.]</span>
+<span class="go"> [  6. -18.]]</span>
+</pre></div>
+</div>
+<p>Defined in src/operator/contrib/krprod.cc:L108</p>
+<p>This function supports a variable number of positional inputs.</p>
+<table class="docutils field-list" frame="void" rules="none">
+<col class="field-name"/>
+<col class="field-body"/>
+<tbody valign="top">
+<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
+<li><strong>args</strong> (<em>Symbol[]</em>) – Positional input matrices</li>
+<li><strong>name</strong> (<em>string, optional.</em>) – Name of the resulting symbol.</li>
+</ul>
+</td>
+</tr>
+<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">The result symbol.</p>
+</td>
+</tr>
+<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></p>
+</td>
+</tr>
+</tbody>
+</table>
+</dd></dl>
+<dl class="function">
 <dt id="mxnet.symbol.linalg_gelqf">
 <code class="descclassname">mxnet.symbol.</code><code class="descname">linalg_gelqf</code><span class="sig-paren">(</span><em>A=None</em>, <em>name=None</em>, <em>attr=None</em>, <em>out=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.symbol.linalg_gelqf" title="Permalink to this definition">¶</a></dt>
 <dd><p>LQ factorization for general matrix.
@@ -8490,7 +8608,7 @@ trsm(A, B, alpha=0.5) = [[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
 <dd><p>Returns element-wise Natural logarithmic value of the input.</p>
 <p>The natural logarithm is logarithm in base <em>e</em>, so that <code class="docutils literal"><span class="pre">log(exp(x))</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L649</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L654</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8516,7 +8634,7 @@ trsm(A, B, alpha=0.5) = [[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
 <dd><p>Returns element-wise Base-10 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">10**log10(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log10</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L661</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L666</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8549,7 +8667,7 @@ trsm(A, B, alpha=0.5) = [[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
 <li>log1p(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L698</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L703</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -8575,7 +8693,7 @@ trsm(A, B, alpha=0.5) = [[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],
 <dd><p>Returns element-wise Base-2 logarithmic value of the input.</p>
 <p><code class="docutils literal"><span class="pre">2**log2(x)</span> <span class="pre">=</span> <span class="pre">x</span></code></p>
 <p>The storage type of <code class="docutils literal"><span class="pre">log2</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L673</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L678</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9120,7 +9238,7 @@ one_hot([[1,0],[1,0],[2,0]], 3) = [[[ 0.  1.  0.]
                                     [ 1.  0.  0.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L427</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L428</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9668,7 +9786,7 @@ Samples will always be returned as a floating point data type.</p>
 <div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">rcbrt</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span><span class="mi">8</span><span class="p">,</span><span class="o">-</span><span class="mi">125</span><span class="p">])</span> <span class="o">=</span> <span class="p">[</span><span class="mf">1.0</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">,</span> <span class="o">-</span><span class="mf" [...]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L614</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L619</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9775,7 +9893,7 @@ elements:</p>
                                  <span class="p">[</span> <span class="mf">3.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">,</span>  <span class="mf">4.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L498</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L560</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9863,7 +9981,7 @@ At most one dimension of shape can be -1.</p>
 </pre></div>
 </div>
 </div></blockquote>
-<p>Defined in src/operator/tensor/matrix_op.cc:L106</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L164</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -9927,7 +10045,7 @@ At most one dimension of shape can be -1.</p>
                       <span class="p">[</span> <span class="mf">9.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">,</span>  <span class="mf">7.</span><span class="p">,</span>  <span class="mf">6.</span><span class="p">,</span>  <span class="mf">5.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L600</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L662</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10016,7 +10134,7 @@ gradient and <span class="math">\(E[g^2]_t\)</span> is the decaying average over
 Tieleman &amp; Hinton, 2012.</p>
 <p>Hinton suggests the momentum term <span class="math">\(\gamma\)</span> to be 0.9 and the learning rate
 <span class="math">\(\eta\)</span> to be 0.001.</p>
-<p>Defined in src/operator/optimizer_op.cc:L229</p>
+<p>Defined in src/operator/optimizer_op.cc:L262</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10063,7 +10181,7 @@ E[g]_t = \gamma_1 * E[g]_{t-1} + (1 - \gamma_1) * g_t\\
 <a class="reference external" href="http://arxiv.org/pdf/1308.0850v5.pdf">http://arxiv.org/pdf/1308.0850v5.pdf</a> Eq(38) - Eq(45) by Alex Graves, 2013.</p>
 <p>Graves suggests the momentum term <span class="math">\(\gamma_1\)</span> to be 0.95, <span class="math">\(\gamma_2\)</span>
 to be 0.9 and the learning rate <span class="math">\(\eta\)</span> to be 0.0001.</p>
-<p>Defined in src/operator/optimizer_op.cc:L268</p>
+<p>Defined in src/operator/optimizer_op.cc:L301</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10141,7 +10259,7 @@ to be 0.9 and the learning rate <span class="math">\(\eta\)</span> to be 0.0001.
 </pre></div>
 </div>
 <p>The storage type of <code class="docutils literal"><span class="pre">rsqrt</span></code> output is always dense</p>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L580</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L585</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10186,7 +10304,7 @@ sample_exponential(lam, shape=(2)) = [[ 0.51837951,  0.19866663],
                                       [ 0.09994757,  0.50447971]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L283</p>
+<p>Defined in src/operator/random/multisample_op.cc:L284</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10234,7 +10352,7 @@ sample_gamma(alpha, beta, shape=(2)) = [[ 0.        ,  0.        ],
                                         [ 2.25797319,  1.70734084]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L281</p>
+<p>Defined in src/operator/random/multisample_op.cc:L282</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10284,7 +10402,7 @@ sample_generalized_negative_binomial(mu, alpha, shape=(2)) = [[ 0.,  3.],
                                                               [ 3.,  1.]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L292</p>
+<p>Defined in src/operator/random/multisample_op.cc:L293</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10384,7 +10502,7 @@ sample_negative_binomial(k, p, shape=(2)) = [[ 15.,  50.],
                                              [ 16.,  12.]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L288</p>
+<p>Defined in src/operator/random/multisample_op.cc:L289</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10433,7 +10551,7 @@ sample_normal(mu, sigma, shape=(2)) = [[-0.56410581,  0.2928229 ],
                                        [ 0.95934606,  4.48287058]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L278</p>
+<p>Defined in src/operator/random/multisample_op.cc:L279</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10482,7 +10600,7 @@ sample_poisson(lam, shape=(2)) = [[  0.,   4.],
                                   [ 13.,   8.]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L285</p>
+<p>Defined in src/operator/random/multisample_op.cc:L286</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10530,7 +10648,7 @@ sample_uniform(low, high, shape=(2)) = [[ 0.40451524,  0.18017688],
                                         [ 3.18687344,  3.68352246]]
 </pre></div>
 </div>
-<p>Defined in src/operator/random/multisample_op.cc:L276</p>
+<p>Defined in src/operator/random/multisample_op.cc:L277</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10620,7 +10738,7 @@ only the row slices whose indices appear in grad.indices are updated (for both w
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">+=</span> <span class="n">v</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L93</p>
+<p>Defined in src/operator/optimizer_op.cc:L94</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10661,7 +10779,7 @@ only the row slices whose indices appear in grad.indices are updated:</p>
     <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">=</span> <span class="n">weight</span><span class="p">[</span><span class="n">row</span><span class="p">]</span> <span class="o">-</span> <span class="n">learning_rate</span> <span class="o">*</span> <span class="n">gradient</span><span class="p">[</span><span class="n">row</span><span class="p">]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/optimizer_op.cc:L53</p>
+<p>Defined in src/operator/optimizer_op.cc:L54</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10862,7 +10980,7 @@ a dense tensor.</p>
                                                           <span class="p">[</span><span class="mf">1.</span><span class="p">,</span>  <span class="mf">3.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L297</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L355</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -10908,7 +11026,7 @@ to the <cite>end</cite> index.</p>
                                            <span class="p">[</span> <span class="mf">10.</span><span class="p">,</span>  <span class="mf">11.</span><span class="p">]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L380</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L442</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11208,7 +11326,7 @@ z[0].shape = (2 ,1 )
 <li>sqrt(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L560</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L565</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11246,7 +11364,7 @@ z[0].shape = (2 ,1 )
 <li>square(csr) = csr</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L537</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L542</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11380,7 +11498,7 @@ data = [[1,2,0],
 csr = cast_storage(data, 'csr')
 
 sum(csr, axis=0)
-[ 8.  2.  2.]
+[ 8.  3.  1.]
 
 sum(csr, axis=1)
 [ 3.  4.  5.]
@@ -11448,7 +11566,7 @@ data = [[1,2,0],
 csr = cast_storage(data, 'csr')
 
 sum(csr, axis=0)
-[ 8.  2.  2.]
+[ 8.  3.  1.]
 
 sum(csr, axis=1)
 [ 3.  4.  5.]
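A quick way to sanity-check the corrected column sums above (the full matrix is not
visible in this hunk, so the rows below are an assumption chosen to match the
documented row sums [3, 4, 5]; treat this as a sketch, not the canonical example):

    import mxnet as mx
    data = mx.nd.array([[1, 2, 0], [3, 0, 1], [4, 1, 0]])
    csr = mx.nd.cast_storage(data, 'csr')      # sparse CSR storage
    print(mx.nd.sum(csr, axis=0).asnumpy())    # -> [ 8.  3.  1.]
    print(mx.nd.sum(csr, axis=1).asnumpy())    # -> [ 3.  4.  5.]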
@@ -11559,7 +11677,7 @@ take(x, [[0,1],[1,2]]) = [[[ 1.,  2.],
                            [ 5.,  6.]]]
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/indexing_op.cc:L326</p>
+<p>Defined in src/operator/tensor/indexing_op.cc:L327</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11689,7 +11807,7 @@ shape <code class="docutils literal"><span class="pre">(2,2)</span></code> array
 </div>
 </li>
 </ul>
-<p>Defined in src/operator/tensor/matrix_op.cc:L559</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L621</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11793,7 +11911,7 @@ topk(x, ret_typ='both', k=2) = [[[ 0.4,  0.3], [ 0.3,  0.2]] ,  [[ 2.,  0.], [ 1
                                <span class="p">[</span> <span class="mf">7.</span><span class="p">,</span>  <span class="mf">8.</span><span class="p">]]]</span>
 </pre></div>
 </div>
-<p>Defined in src/operator/tensor/matrix_op.cc:L195</p>
+<p>Defined in src/operator/tensor/matrix_op.cc:L253</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
@@ -11831,7 +11949,7 @@ zero than x is. In short, the fractional part of the signed number x is discarde
 <li>trunc(row_sparse) = row_sparse</li>
 </ul>
 </div></blockquote>
-<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L500</p>
+<p>Defined in src/operator/tensor/elemwise_unary_op_basic.cc:L503</p>
 <table class="docutils field-list" frame="void" rules="none">
 <col class="field-name"/>
 <col class="field-body"/>
diff --git a/versions/master/api/python/symbol_in_pictures/symbol_in_pictures.html b/versions/master/api/python/symbol_in_pictures/symbol_in_pictures.html
index 03f027d..a3a9eb4 100644
--- a/versions/master/api/python/symbol_in_pictures/symbol_in_pictures.html
+++ b/versions/master/api/python/symbol_in_pictures/symbol_in_pictures.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/r/index.html b/versions/master/api/r/index.html
index 74e3e5e..afaec54 100644
--- a/versions/master/api/r/index.html
+++ b/versions/master/api/r/index.html
@@ -52,7 +52,7 @@
 <link href="../python/rtc/rtc.html" rel="prev" title="Run-Time Compilation API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/docs/ml/dmlc/mxnet/DataIter.html b/versions/master/api/scala/docs/ml/dmlc/mxnet/DataIter.html
index 76945d6..d2781f3 100644
--- a/versions/master/api/scala/docs/ml/dmlc/mxnet/DataIter.html
+++ b/versions/master/api/scala/docs/ml/dmlc/mxnet/DataIter.html
@@ -105,7 +105,7 @@
 <div class="types members" id="types">
 <h3>Type Members</h3>
 <ol><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator.GroupedIterator" visbl="pub">
-<a id="GroupedIterator[B&gt;:A]extendsAbstractIterator[Seq[B]]withIterator[Seq[B]]"></a>
+<a id="GroupedIterator[B&lt;:&lt;?&gt;]extendsAbstractIterator[Seq[B]]withIterator[Seq[B]]"></a>
 <a id="GroupedIterator[B&gt;:A]:GroupedIterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -317,7 +317,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#++" visbl="pub">
-<a id="++[B&gt;:A](that:=&gt;scala.collection.GenTraversableOnce[B]):Iterator[B]"></a>
+<a id="++[B&lt;:&lt;?&gt;](that:&lt;?&gt;):Iterator[B]"></a>
 <a id="++[B&gt;:DataBatch](⇒GenTraversableOnce[B]):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -508,7 +508,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>)</span>
 </dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#collect" visbl="pub">
-<a id="collect[B](pf:PartialFunction[A,B]):Iterator[B]"></a>
+<a id="collect[B&lt;:&lt;?&gt;](pf:&lt;?&gt;):Iterator[B]"></a>
 <a id="collect[B](PartialFunction[DataBatch,B]):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -544,7 +544,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#contains" visbl="pub">
-<a id="contains(elem:Any):Boolean"></a>
+<a id="contains(elem:&lt;?&gt;):Boolean"></a>
 <a id="contains(Any):Boolean"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -629,7 +629,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#corresponds" visbl="pub">
-<a id="corresponds[B](that:scala.collection.GenTraversableOnce[B])(p:(A,B)=&gt;Boolean):Boolean"></a>
+<a id="corresponds[B&lt;:&lt;?&gt;](that:&lt;?&gt;)(p:&lt;?&gt;):Boolean"></a>
 <a id="corresponds[B](GenTraversableOnce[B])((DataBatch,B)⇒Boolean):Boolean"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -679,7 +679,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </a>
 </span>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#drop" visbl="pub">
-<a id="drop(n:Int):Iterator[A]"></a>
+<a id="drop(n:&lt;?&gt;):Iterator[A]"></a>
 <a id="drop(Int):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -696,7 +696,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#dropWhile" visbl="pub">
-<a id="dropWhile(p:A=&gt;Boolean):Iterator[A]"></a>
+<a id="dropWhile(p:&lt;?&gt;):Iterator[A]"></a>
 <a id="dropWhile((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -781,7 +781,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator → TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#filter" visbl="pub">
-<a id="filter(p:A=&gt;Boolean):Iterator[A]"></a>
+<a id="filter(p:&lt;?&gt;):Iterator[A]"></a>
 <a id="filter((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -798,7 +798,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#filterNot" visbl="pub">
-<a id="filterNot(p:A=&gt;Boolean):Iterator[A]"></a>
+<a id="filterNot(p:&lt;?&gt;):Iterator[A]"></a>
 <a id="filterNot((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -853,7 +853,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator → TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#flatMap" visbl="pub">
-<a id="flatMap[B](f:A=&gt;scala.collection.GenTraversableOnce[B]):Iterator[B]"></a>
+<a id="flatMap[B&lt;:&lt;?&gt;](f:&lt;?&gt;):Iterator[B]"></a>
 <a id="flatMap[B]((DataBatch)⇒GenTraversableOnce[B]):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -972,7 +972,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#grouped" visbl="pub">
-<a id="grouped[B&gt;:A](size:Int):Iterator.this.GroupedIterator[B]"></a>
+<a id="grouped[B&lt;:&lt;?&gt;](size:&lt;?&gt;):Iterator.this.GroupedIterator[B]"></a>
 <a id="grouped[B&gt;:DataBatch](Int):GroupedIterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1023,7 +1023,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#indexOf" visbl="pub">
-<a id="indexOf[B&gt;:A](elem:B):Int"></a>
+<a id="indexOf[B&lt;:&lt;?&gt;](elem:&lt;?&gt;):Int"></a>
 <a id="indexOf[B&gt;:DataBatch](B):Int"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1040,7 +1040,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#indexWhere" visbl="pub">
-<a id="indexWhere(p:A=&gt;Boolean):Int"></a>
+<a id="indexWhere(p:&lt;?&gt;):Int"></a>
 <a id="indexWhere((DataBatch)⇒Boolean):Int"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1350,7 +1350,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#padTo" visbl="pub">
-<a id="padTo[A1&gt;:A](len:Int,elem:A1):Iterator[A1]"></a>
+<a id="padTo[A1&lt;:&lt;?&gt;](len:&lt;?&gt;,elem:&lt;?&gt;):Iterator[A1]"></a>
 <a id="padTo[A1&gt;:DataBatch](Int,A1):Iterator[A1]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1367,7 +1367,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#partition" visbl="pub">
-<a id="partition(p:A=&gt;Boolean):(Iterator[A],Iterator[A])"></a>
+<a id="partition(p:&lt;?&gt;):(Iterator[A],Iterator[A])"></a>
 <a id="partition((DataBatch)⇒Boolean):(Iterator[DataBatch],Iterator[DataBatch])"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1384,7 +1384,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#patch" visbl="pub">
-<a id="patch[B&gt;:A](from:Int,patchElems:Iterator[B],replaced:Int):Iterator[B]"></a>
+<a id="patch[B&lt;:&lt;?&gt;](from:&lt;?&gt;,patchElems:&lt;?&gt;,replaced:&lt;?&gt;):Iterator[B]"></a>
 <a id="patch[B&gt;:DataBatch](Int,Iterator[B],Int):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1537,7 +1537,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[this] </dd><dt>Definition Classes</dt><dd>TraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#sameElements" visbl="pub">
-<a id="sameElements(that:Iterator[_]):Boolean"></a>
+<a id="sameElements(that:&lt;?&gt;):Boolean"></a>
 <a id="sameElements(Iterator[_]):Boolean"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1554,7 +1554,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#scanLeft" visbl="pub">
-<a id="scanLeft[B](z:B)(op:(B,A)=&gt;B):Iterator[B]"></a>
+<a id="scanLeft[B&lt;:&lt;?&gt;](z:&lt;?&gt;)(op:&lt;?&gt;):Iterator[B]"></a>
 <a id="scanLeft[B](B)((B,DataBatch)⇒B):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1571,7 +1571,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#scanRight" visbl="pub">
-<a id="scanRight[B](z:B)(op:(A,B)=&gt;B):Iterator[B]"></a>
+<a id="scanRight[B&lt;:&lt;?&gt;](z:&lt;?&gt;)(op:&lt;?&gt;):Iterator[B]"></a>
 <a id="scanRight[B](B)((DataBatch,B)⇒B):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1622,7 +1622,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#slice" visbl="pub">
-<a id="slice(from:Int,until:Int):Iterator[A]"></a>
+<a id="slice(from:&lt;?&gt;,until:&lt;?&gt;):Iterator[A]"></a>
 <a id="slice(Int,Int):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1639,7 +1639,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#sliding" visbl="pub">
-<a id="sliding[B&gt;:A](size:Int,step:Int):Iterator.this.GroupedIterator[B]"></a>
+<a id="sliding[B&lt;:&lt;?&gt;](size:&lt;?&gt;,step:&lt;?&gt;):Iterator.this.GroupedIterator[B]"></a>
 <a id="sliding[B&gt;:DataBatch](Int,Int):GroupedIterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1656,7 +1656,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#span" visbl="pub">
-<a id="span(p:A=&gt;Boolean):(Iterator[A],Iterator[A])"></a>
+<a id="span(p:&lt;?&gt;):(Iterator[A],Iterator[A])"></a>
 <a id="span((DataBatch)⇒Boolean):(Iterator[DataBatch],Iterator[DataBatch])"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1707,7 +1707,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#take" visbl="pub">
-<a id="take(n:Int):Iterator[A]"></a>
+<a id="take(n:&lt;?&gt;):Iterator[A]"></a>
 <a id="take(Int):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1724,7 +1724,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#takeWhile" visbl="pub">
-<a id="takeWhile(p:A=&gt;Boolean):Iterator[A]"></a>
+<a id="takeWhile(p:&lt;?&gt;):Iterator[A]"></a>
 <a id="takeWhile((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -2042,7 +2042,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>)</span>
 </dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#withFilter" visbl="pub">
-<a id="withFilter(p:A=&gt;Boolean):Iterator[A]"></a>
+<a id="withFilter(p:&lt;?&gt;):Iterator[A]"></a>
 <a id="withFilter((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -2059,7 +2059,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#zip" visbl="pub">
-<a id="zip[B](that:Iterator[B]):Iterator[(A,B)]"></a>
+<a id="zip[B&lt;:&lt;?&gt;](that:&lt;?&gt;):Iterator[(A,B)]"></a>
 <a id="zip[B](Iterator[B]):Iterator[(DataBatch,B)]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -2076,7 +2076,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#zipAll" visbl="pub">
-<a id="zipAll[B,A1&gt;:A,B1&gt;:B](that:Iterator[B],thisElem:A1,thatElem:B1):Iterator[(A1,B1)]"></a>
+<a id="zipAll[B&lt;:&lt;?&gt;,A1&lt;:&lt;?&gt;,B1&lt;:&lt;?&gt;](that:&lt;?&gt;,thisElem:&lt;?&gt;,thatElem:&lt;?&gt;):Iterator[(A1,B1)]"></a>
 <a id="zipAll[B,A1&gt;:DataBatch,B1&gt;:B](Iterator[B],A1,B1):Iterator[(A1,B1)]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
diff --git a/versions/master/api/scala/docs/ml/dmlc/mxnet/DataPack.html b/versions/master/api/scala/docs/ml/dmlc/mxnet/DataPack.html
index 0fdd793..a67495b 100644
--- a/versions/master/api/scala/docs/ml/dmlc/mxnet/DataPack.html
+++ b/versions/master/api/scala/docs/ml/dmlc/mxnet/DataPack.html
@@ -437,7 +437,7 @@
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableLike → GenTraversableLike</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.TraversableOnce#collectFirst" visbl="pub">
-<a id="collectFirst[B](pf:PartialFunction[A,B]):Option[B]"></a>
+<a id="collectFirst[B&lt;:&lt;?&gt;](pf:&lt;?&gt;):Option[B]"></a>
 <a id="collectFirst[B](PartialFunction[DataBatch,B]):Option[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -522,7 +522,7 @@
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.TraversableOnce#copyToBuffer" visbl="pub">
-<a id="copyToBuffer[B&gt;:A](dest:scala.collection.mutable.Buffer[B]):Unit"></a>
+<a id="copyToBuffer[B&lt;:&lt;?&gt;](dest:&lt;?&gt;):Unit"></a>
 <a id="copyToBuffer[B&gt;:DataBatch](Buffer[B]):Unit"></a>
 <h4 class="signature">
 <span class="modifier_kind">
diff --git a/versions/master/api/scala/docs/ml/dmlc/mxnet/io/PrefetchingIter.html b/versions/master/api/scala/docs/ml/dmlc/mxnet/io/PrefetchingIter.html
index bf14927..cd19229 100644
--- a/versions/master/api/scala/docs/ml/dmlc/mxnet/io/PrefetchingIter.html
+++ b/versions/master/api/scala/docs/ml/dmlc/mxnet/io/PrefetchingIter.html
@@ -104,7 +104,7 @@ and combine them with prefetching.
 <div class="types members" id="types">
 <h3>Type Members</h3>
 <ol><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator.GroupedIterator" visbl="pub">
-<a id="GroupedIterator[B&lt;:&lt;?&gt;]extendsAbstractIterator[Seq[B]]withIterator[Seq[B]]"></a>
+<a id="GroupedIterator[B&gt;:A]extendsAbstractIterator[Seq[B]]withIterator[Seq[B]]"></a>
 <a id="GroupedIterator[B&gt;:A]:GroupedIterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -159,7 +159,7 @@ and combine them with prefetching.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#++" visbl="pub">
-<a id="++[B&lt;:&lt;?&gt;](that:&lt;?&gt;):Iterator[B]"></a>
+<a id="++[B&gt;:A](that:=&gt;scala.collection.GenTraversableOnce[B]):Iterator[B]"></a>
 <a id="++[B&gt;:DataBatch](⇒GenTraversableOnce[B]):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -367,7 +367,7 @@ and combine them with prefetching.
 </span>)</span>
 </dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#collect" visbl="pub">
-<a id="collect[B&lt;:&lt;?&gt;](pf:&lt;?&gt;):Iterator[B]"></a>
+<a id="collect[B](pf:PartialFunction[A,B]):Iterator[B]"></a>
 <a id="collect[B](PartialFunction[DataBatch,B]):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -386,7 +386,7 @@ and combine them with prefetching.
 <span class="name">@migration</span>
 </dd><dt>Migration</dt><dd class="cmt"><p><i>(Changed in version 2.8.0)</i> <code>collect</code> has changed. The previous behavior can be reproduced with <code>toSeq</code>.</p></dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.TraversableOnce#collectFirst" visbl="pub">
-<a id="collectFirst[B&lt;:&lt;?&gt;](pf:&lt;?&gt;):Option[B]"></a>
+<a id="collectFirst[B](pf:PartialFunction[A,B]):Option[B]"></a>
 <a id="collectFirst[B](PartialFunction[DataBatch,B]):Option[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -403,7 +403,7 @@ and combine them with prefetching.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#contains" visbl="pub">
-<a id="contains(elem:&lt;?&gt;):Boolean"></a>
+<a id="contains(elem:Any):Boolean"></a>
 <a id="contains(Any):Boolean"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -471,7 +471,7 @@ and combine them with prefetching.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.TraversableOnce#copyToBuffer" visbl="pub">
-<a id="copyToBuffer[B&lt;:&lt;?&gt;](dest:&lt;?&gt;):Unit"></a>
+<a id="copyToBuffer[B&gt;:A](dest:scala.collection.mutable.Buffer[B]):Unit"></a>
 <a id="copyToBuffer[B&gt;:DataBatch](Buffer[B]):Unit"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -488,7 +488,7 @@ and combine them with prefetching.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#corresponds" visbl="pub">
-<a id="corresponds[B&lt;:&lt;?&gt;](that:&lt;?&gt;)(p:&lt;?&gt;):Boolean"></a>
+<a id="corresponds[B](that:scala.collection.GenTraversableOnce[B])(p:(A,B)=&gt;Boolean):Boolean"></a>
 <a id="corresponds[B](GenTraversableOnce[B])((DataBatch,B)⇒Boolean):Boolean"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -558,7 +558,7 @@ and combine them with prefetching.
 The object shall never be used after it is disposed.
 </p></div></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#drop" visbl="pub">
-<a id="drop(n:&lt;?&gt;):Iterator[A]"></a>
+<a id="drop(n:Int):Iterator[A]"></a>
 <a id="drop(Int):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -575,7 +575,7 @@ The object shall never be used after it is disposed.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#dropWhile" visbl="pub">
-<a id="dropWhile(p:&lt;?&gt;):Iterator[A]"></a>
+<a id="dropWhile(p:A=&gt;Boolean):Iterator[A]"></a>
 <a id="dropWhile((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -660,7 +660,7 @@ The object shall never be used after it is disposed.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator → TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#filter" visbl="pub">
-<a id="filter(p:&lt;?&gt;):Iterator[A]"></a>
+<a id="filter(p:A=&gt;Boolean):Iterator[A]"></a>
 <a id="filter((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -677,7 +677,7 @@ The object shall never be used after it is disposed.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#filterNot" visbl="pub">
-<a id="filterNot(p:&lt;?&gt;):Iterator[A]"></a>
+<a id="filterNot(p:A=&gt;Boolean):Iterator[A]"></a>
 <a id="filterNot((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -732,7 +732,7 @@ The object shall never be used after it is disposed.
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator → TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#flatMap" visbl="pub">
-<a id="flatMap[B&lt;:&lt;?&gt;](f:&lt;?&gt;):Iterator[B]"></a>
+<a id="flatMap[B](f:A=&gt;scala.collection.GenTraversableOnce[B]):Iterator[B]"></a>
 <a id="flatMap[B]((DataBatch)⇒GenTraversableOnce[B]):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -921,7 +921,7 @@ The object shall never be used after it is disposed.
 in current batch</p><div class="fullcomment"><div class="comment cmt"><p>get the number of padding examples
 in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class="cmt"><p>number of padding examples in current batch</p></dd></dl><dl class="attributes block"> <dt>Definition Classes</dt><dd><a class="extype" href="" name="ml.dmlc.mxnet.io.PrefetchingIter">PrefetchingIter</a> → <a class="extype" href="../DataIter.html" name="ml.dmlc.mxnet.DataIter">DataIter</a></dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#grouped" visbl="pub">
-<a id="grouped[B&lt;:&lt;?&gt;](size:&lt;?&gt;):Iterator.this.GroupedIterator[B]"></a>
+<a id="grouped[B&gt;:A](size:Int):Iterator.this.GroupedIterator[B]"></a>
 <a id="grouped[B&gt;:DataBatch](Int):GroupedIterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -989,7 +989,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#indexOf" visbl="pub">
-<a id="indexOf[B&lt;:&lt;?&gt;](elem:&lt;?&gt;):Int"></a>
+<a id="indexOf[B&gt;:A](elem:B):Int"></a>
 <a id="indexOf[B&gt;:DataBatch](B):Int"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1006,7 +1006,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#indexWhere" visbl="pub">
-<a id="indexWhere(p:&lt;?&gt;):Int"></a>
+<a id="indexWhere(p:A=&gt;Boolean):Int"></a>
 <a id="indexWhere((DataBatch)⇒Boolean):Int"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1312,7 +1312,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#padTo" visbl="pub">
-<a id="padTo[A1&lt;:&lt;?&gt;](len:&lt;?&gt;,elem:&lt;?&gt;):Iterator[A1]"></a>
+<a id="padTo[A1&gt;:A](len:Int,elem:A1):Iterator[A1]"></a>
 <a id="padTo[A1&gt;:DataBatch](Int,A1):Iterator[A1]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1329,7 +1329,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#partition" visbl="pub">
-<a id="partition(p:&lt;?&gt;):(Iterator[A],Iterator[A])"></a>
+<a id="partition(p:A=&gt;Boolean):(Iterator[A],Iterator[A])"></a>
 <a id="partition((DataBatch)⇒Boolean):(Iterator[DataBatch],Iterator[DataBatch])"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1346,7 +1346,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#patch" visbl="pub">
-<a id="patch[B&lt;:&lt;?&gt;](from:&lt;?&gt;,patchElems:&lt;?&gt;,replaced:&lt;?&gt;):Iterator[B]"></a>
+<a id="patch[B&gt;:A](from:Int,patchElems:Iterator[B],replaced:Int):Iterator[B]"></a>
 <a id="patch[B&gt;:DataBatch](Int,Iterator[B],Int):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1568,7 +1568,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[this] </dd><dt>Definition Classes</dt><dd>TraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#sameElements" visbl="pub">
-<a id="sameElements(that:&lt;?&gt;):Boolean"></a>
+<a id="sameElements(that:Iterator[_]):Boolean"></a>
 <a id="sameElements(Iterator[_]):Boolean"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1585,7 +1585,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#scanLeft" visbl="pub">
-<a id="scanLeft[B&lt;:&lt;?&gt;](z:&lt;?&gt;)(op:&lt;?&gt;):Iterator[B]"></a>
+<a id="scanLeft[B](z:B)(op:(B,A)=&gt;B):Iterator[B]"></a>
 <a id="scanLeft[B](B)((B,DataBatch)⇒B):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1602,7 +1602,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#scanRight" visbl="pub">
-<a id="scanRight[B&lt;:&lt;?&gt;](z:&lt;?&gt;)(op:&lt;?&gt;):Iterator[B]"></a>
+<a id="scanRight[B](z:B)(op:(A,B)=&gt;B):Iterator[B]"></a>
 <a id="scanRight[B](B)((DataBatch,B)⇒B):Iterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1653,7 +1653,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>TraversableOnce → GenTraversableOnce</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#slice" visbl="pub">
-<a id="slice(from:&lt;?&gt;,until:&lt;?&gt;):Iterator[A]"></a>
+<a id="slice(from:Int,until:Int):Iterator[A]"></a>
 <a id="slice(Int,Int):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1670,7 +1670,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#sliding" visbl="pub">
-<a id="sliding[B&lt;:&lt;?&gt;](size:&lt;?&gt;,step:&lt;?&gt;):Iterator.this.GroupedIterator[B]"></a>
+<a id="sliding[B&gt;:A](size:Int,step:Int):Iterator.this.GroupedIterator[B]"></a>
 <a id="sliding[B&gt;:DataBatch](Int,Int):GroupedIterator[B]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1687,7 +1687,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#span" visbl="pub">
-<a id="span(p:&lt;?&gt;):(Iterator[A],Iterator[A])"></a>
+<a id="span(p:A=&gt;Boolean):(Iterator[A],Iterator[A])"></a>
 <a id="span((DataBatch)⇒Boolean):(Iterator[DataBatch],Iterator[DataBatch])"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1738,7 +1738,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#take" visbl="pub">
-<a id="take(n:&lt;?&gt;):Iterator[A]"></a>
+<a id="take(n:Int):Iterator[A]"></a>
 <a id="take(Int):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -1755,7 +1755,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#takeWhile" visbl="pub">
-<a id="takeWhile(p:&lt;?&gt;):Iterator[A]"></a>
+<a id="takeWhile(p:A=&gt;Boolean):Iterator[A]"></a>
 <a id="takeWhile((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -2073,7 +2073,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>)</span>
 </dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#withFilter" visbl="pub">
-<a id="withFilter(p:&lt;?&gt;):Iterator[A]"></a>
+<a id="withFilter(p:A=&gt;Boolean):Iterator[A]"></a>
 <a id="withFilter((DataBatch)⇒Boolean):Iterator[DataBatch]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -2090,7 +2090,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#zip" visbl="pub">
-<a id="zip[B&lt;:&lt;?&gt;](that:&lt;?&gt;):Iterator[(A,B)]"></a>
+<a id="zip[B](that:Iterator[B]):Iterator[(A,B)]"></a>
 <a id="zip[B](Iterator[B]):Iterator[(DataBatch,B)]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
@@ -2107,7 +2107,7 @@ in current batch</p></div><dl class="paramcmts block"><dt>returns</dt><dd class=
 </span>
 <div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Iterator</dd></dl></div>
 </li><li data-isabs="false" fullcomment="yes" group="Ungrouped" name="scala.collection.Iterator#zipAll" visbl="pub">
-<a id="zipAll[B&lt;:&lt;?&gt;,A1&lt;:&lt;?&gt;,B1&lt;:&lt;?&gt;](that:&lt;?&gt;,thisElem:&lt;?&gt;,thatElem:&lt;?&gt;):Iterator[(A1,B1)]"></a>
+<a id="zipAll[B,A1&gt;:A,B1&gt;:B](that:Iterator[B],thisElem:A1,thatElem:B1):Iterator[(A1,B1)]"></a>
 <a id="zipAll[B,A1&gt;:DataBatch,B1&gt;:B](Iterator[B],A1,B1):Iterator[(A1,B1)]"></a>
 <h4 class="signature">
 <span class="modifier_kind">
diff --git a/versions/master/api/scala/index.html b/versions/master/api/scala/index.html
index e0598ef..edfd505 100644
--- a/versions/master/api/scala/index.html
+++ b/versions/master/api/scala/index.html
@@ -52,7 +52,7 @@
 <link href="../c++/index.html" rel="prev" title="MXNet - C++ API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/io.html b/versions/master/api/scala/io.html
index 8ae20a7..3bfd9e7 100644
--- a/versions/master/api/scala/io.html
+++ b/versions/master/api/scala/io.html
@@ -53,7 +53,7 @@
 <link href="symbol.html" rel="prev" title="MXNet Scala Symbolic API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/kvstore.html b/versions/master/api/scala/kvstore.html
index df59258..a9ed936 100644
--- a/versions/master/api/scala/kvstore.html
+++ b/versions/master/api/scala/kvstore.html
@@ -53,7 +53,7 @@
 <link href="ndarray.html" rel="prev" title="NDArray API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/model.html b/versions/master/api/scala/model.html
index e4d115c..03d563a 100644
--- a/versions/master/api/scala/model.html
+++ b/versions/master/api/scala/model.html
@@ -53,7 +53,7 @@
 <link href="module.html" rel="prev" title="Module API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/module.html b/versions/master/api/scala/module.html
index df082ac..5c6fe9b 100644
--- a/versions/master/api/scala/module.html
+++ b/versions/master/api/scala/module.html
@@ -53,7 +53,7 @@
 <link href="index.html" rel="prev" title="MXNet - Scala API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/ndarray.html b/versions/master/api/scala/ndarray.html
index 12776ff..9247b20 100644
--- a/versions/master/api/scala/ndarray.html
+++ b/versions/master/api/scala/ndarray.html
@@ -53,7 +53,7 @@
 <link href="io.html" rel="prev" title="MXNet Scala Data Loading API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/symbol.html b/versions/master/api/scala/symbol.html
index 45f995d..b8a50d5 100644
--- a/versions/master/api/scala/symbol.html
+++ b/versions/master/api/scala/symbol.html
@@ -53,7 +53,7 @@
 <link href="model.html" rel="prev" title="MXNet Scala Model API"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/api/scala/symbol_in_pictures.html b/versions/master/api/scala/symbol_in_pictures.html
index 99d11ef..5bf45ed 100644
--- a/versions/master/api/scala/symbol_in_pictures.html
+++ b/versions/master/api/scala/symbol_in_pictures.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/index.html b/versions/master/architecture/index.html
index 48a5153..11a2a65 100644
--- a/versions/master/architecture/index.html
+++ b/versions/master/architecture/index.html
@@ -52,7 +52,7 @@
 <link href="../get_started/index.html" rel="prev" title="<no title>"/>
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/note_data_loading.html b/versions/master/architecture/note_data_loading.html
index 0295f58..4918577 100644
--- a/versions/master/architecture/note_data_loading.html
+++ b/versions/master/architecture/note_data_loading.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/note_engine.html b/versions/master/architecture/note_engine.html
index 12d3220..0f71c4d 100644
--- a/versions/master/architecture/note_engine.html
+++ b/versions/master/architecture/note_engine.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/note_memory.html b/versions/master/architecture/note_memory.html
index 6636fb8..5b67090 100644
--- a/versions/master/architecture/note_memory.html
+++ b/versions/master/architecture/note_memory.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/overview.html b/versions/master/architecture/overview.html
index aa9e3b3..297da86 100644
--- a/versions/master/architecture/overview.html
+++ b/versions/master/architecture/overview.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/program_model.html b/versions/master/architecture/program_model.html
index f7bea38..8c574dc 100644
--- a/versions/master/architecture/program_model.html
+++ b/versions/master/architecture/program_model.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/release_note_0_9.html b/versions/master/architecture/release_note_0_9.html
index 093a838..d666d21 100644
--- a/versions/master/architecture/release_note_0_9.html
+++ b/versions/master/architecture/release_note_0_9.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/architecture/rnn_interface.html b/versions/master/architecture/rnn_interface.html
index aabad35..7193236 100644
--- a/versions/master/architecture/rnn_interface.html
+++ b/versions/master/architecture/rnn_interface.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/community/contribute.html b/versions/master/community/contribute.html
index 0e0045c..a44b862 100644
--- a/versions/master/community/contribute.html
+++ b/versions/master/community/contribute.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/community/index.html b/versions/master/community/index.html
index cf93f55..a8ebe09 100644
--- a/versions/master/community/index.html
+++ b/versions/master/community/index.html
@@ -48,10 +48,10 @@
 <!-- -->
 <!-- <script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script> -->
 <!-- -->
-<link href="../tutorials/sparse/train.html" rel="prev" title="Train a Linear Regression Model with Sparse Symbols">
+<link href="../tutorials/unsupervised_learning/gan.html" rel="prev" title="Generative Adversarial Network (GAN)">
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/community/mxnet_channels.html b/versions/master/community/mxnet_channels.html
index e2cfdd5..2da4255 100644
--- a/versions/master/community/mxnet_channels.html
+++ b/versions/master/community/mxnet_channels.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/community/powered_by.html b/versions/master/community/powered_by.html
index 95906ae..17f4250 100644
--- a/versions/master/community/powered_by.html
+++ b/versions/master/community/powered_by.html
@@ -50,7 +50,7 @@
 <!-- -->
 <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"/>
 </link></link></head>
-<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
+<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background.png" role="document">
 <div class="content-block"><div class="navbar navbar-fixed-top">
 <div class="container" id="navContainer">
 <div class="innder" id="header-inner">
diff --git a/versions/master/doxygen/annotated.html b/versions/master/doxygen/annotated.html
index 626fc3f..ae85a12 100644
--- a/versions/master/doxygen/annotated.html
+++ b/versions/master/doxygen/annotated.html
@@ -76,7 +76,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </div><!--header-->
 <div class="contents">
 <div class="textblock">Here are the classes, structs, unions and interfaces with brief descriptions:</div><div class="directory">
-<div class="levels">[detail level <span onclick="javascript:toggleLevel(1);">1</span><span onclick="javascript:toggleLevel(2);">2</span><span onclick="javascript:toggleLevel(3);">3</span><span onclick="javascript:toggleLevel(4);">4</span>]</div><table class="directory">
+<div class="levels">[detail level <span onclick="javascript:toggleLevel(1);">1</span><span onclick="javascript:toggleLevel(2);">2</span><span onclick="javascript:toggleLevel(3);">3</span><span onclick="javascript:toggleLevel(4);">4</span><span onclick="javascript:toggleLevel(5);">5</span>]</div><table class="directory">
 <tr class="even" id="row_0_"><td class="entry"><img alt="o" height="22" id="arr_0_" onclick="toggleFolder('0_')" src="ftv2mnode.png" width="16"/><img alt="N" height="22" src="ftv2ns.png" width="24"/><a class="el" href="namespacedmlc.html" target="_self">dmlc</a></td><td class="desc"></td></tr>
 <tr id="row_0_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" id="arr_0_0_" onclick="toggleFolder('0_0_')" src="ftv2mlastnode.png" width="16"/><img alt="N" height="22" src="ftv2ns.png" width="24"/><a class="el" href="namespacedmlc_1_1parameter.html" target="_self">parameter</a></td><td class="desc"></td></tr>
 <tr class="even" id="row_0_0_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt=" " height="22" src="ftv2blank.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html" target="_self">FieldEntry&lt; mxnet::TShape &gt;</a></td><td class="desc"></td></tr>
@@ -87,13 +87,21 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <tr id="row_1_0_1_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf.html" target="_self">UniqueIf</a></td><td class="desc">Helper for non-array type <code>T</code> </ [...]
 <tr class="even" id="row_1_0_1_1_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf_3_01T[]_4.html" target="_self">UniqueIf&lt; T[]&gt;</a></td><td class="desc">Helper f [...]
 <tr id="row_1_0_1_2_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf_3_01T[kSize]_4.html" target="_self">UniqueIf&lt; T[kSize]&gt;</a></td><td class="desc">Helper  [...]
-<tr class="even" id="row_1_0_2_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1LazyAllocArray.html" target="_self">LazyAllocArray</a></td><td class="desc"></td></tr>
-<tr id="row_1_0_3_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1ObjectPool.html" target="_self">ObjectPool</a></td><td class="desc">Object pool for fast allocation and deallocation </td></tr>
-<tr class="even" id="row_1_0_4_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1ObjectPoolAllocatable.html" target="_self">ObjectPoolAllocatable</a></td><td class="desc">Helper trait class for easy allocation and deallocation </td></tr>
-<tr id="row_1_0_5_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1StaticArray.html" target="_self">StaticArray</a></td><td class="desc">Static array. This code is borrowed from struct Shape&lt;ndim&gt;, except that users can specify the type of  [...]
-<tr class="even" id="row_1_0_6_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1csr__indptr__check.html" target="_self">csr_indptr_check</a></td><td class="desc">IndPtr should be non-negative, in non-decreasing order, start with 0 and end with va [...]
-<tr id="row_1_0_7_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1csr__idx__check.html" target="_self">csr_idx_check</a></td><td class="desc">Indices should be non-negative, less than the number of columns and in ascending order per row </td></tr>
-<tr class="even" id="row_1_0_8_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1rsp__idx__check.html" target="_self">rsp_idx_check</a></td><td class="desc">Indices of RSPNDArray should be non-negative, less than the size of first dimension an [...]
+<tr class="even" id="row_1_0_2_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" id="arr_1_0_2_" onclick="toggleFolder('1_0_2_')" src="ftv2mnode.png" width="16"/><img alt="N" height="22" src="ftv2ns.png" width="24"/><a class="el" href="namespacemxnet_1_1common_1_1random.html" target="_self">random</a></td><td class="desc"></td></tr>
+<tr id="row_1_0_2_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator.html" target="_self">RandGenerator</a></td><td class="desc"></td></tr>
+<tr class="even" id="row_1_0_2_1_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" id="arr_1_0_2_1_" onclick="toggleFolder('1_0_2_1_')" src="ftv2mnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01cpu_00_01DType_01_4.htm [...]
+<tr id="row_1_0_2_1_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01cpu_00_01DType_01_4_1_1I [...]
+<tr class="even" id="row_1_0_2_2_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" id="arr_1_0_2_2_" onclick="toggleFolder('1_0_2_2_')" src="ftv2mnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01DType_01_4.htm [...]
+<tr id="row_1_0_2_2_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01DType_01_4_1_1I [...]
+<tr class="even" id="row_1_0_2_3_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" id="arr_1_0_2_3_" onclick="toggleFolder('1_0_2_3_')" src="ftv2mlastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01double_01_ [...]
+<tr id="row_1_0_2_3_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt=" " height="22" src="ftv2blank.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01double_01_4_1_1Imp [...]
+<tr class="even" id="row_1_0_3_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1LazyAllocArray.html" target="_self">LazyAllocArray</a></td><td class="desc"></td></tr>
+<tr id="row_1_0_4_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1common_1_1ObjectPool.html" target="_self">ObjectPool</a></td><td class="desc">Object pool for fast allocation and deallocation </td></tr>
+<tr class="even" id="row_1_0_5_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1ObjectPoolAllocatable.html" target="_self">ObjectPoolAllocatable</a></td><td class="desc">Helper trait class for easy allocation and deallocation </td></tr>
+<tr id="row_1_0_6_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1StaticArray.html" target="_self">StaticArray</a></td><td class="desc">Static array. This code is borrowed from struct Shape&lt;ndim&gt;, except that users can specify the type of  [...]
+<tr class="even" id="row_1_0_7_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1csr__indptr__check.html" target="_self">csr_indptr_check</a></td><td class="desc">IndPtr should be non-negative, in non-decreasing order, start with 0 and end with va [...]
+<tr id="row_1_0_8_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1csr__idx__check.html" target="_self">csr_idx_check</a></td><td class="desc">Indices should be non-negative, less than the number of columns and in ascending order per row </td></tr>
+<tr class="even" id="row_1_0_9_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="structmxnet_1_1common_1_1rsp__idx__check.html" target="_self">rsp_idx_check</a></td><td class="desc">Indices of RSPNDArray should be non-negative, less than the size of first dimension an [...]
 <tr id="row_1_1_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" src="ftv2node.png" width="16"/><img alt="N" height="22" src="ftv2ns.png" width="24"/><a class="el" href="namespacemxnet_1_1csr.html" target="_self">csr</a></td><td class="desc"></td></tr>
 <tr class="even" id="row_1_2_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="o" height="22" id="arr_1_2_" onclick="toggleFolder('1_2_')" src="ftv2mnode.png" width="16"/><img alt="N" height="22" src="ftv2ns.png" width="24"/><a class="el" href="namespacemxnet_1_1engine.html" target="_self">engine</a></td><td class="desc">Namespace of engine internal types </td></tr>
 <tr id="row_1_2_0_"><td class="entry"><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="|" height="22" src="ftv2vertline.png" width="16"/><img alt="\" height="22" src="ftv2lastnode.png" width="16"/><img alt="C" height="22" src="ftv2cl.png" width="24"/><a class="el" href="classmxnet_1_1engine_1_1CallbackOnComplete.html" target="_self">CallbackOnComplete</a></td><td class="desc">OnComplete Callback to the engine, called by AsyncFn when action completes </td></tr>
@@ -145,7 +153,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:18 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/base_8h.html b/versions/master/doxygen/base_8h.html
index 1ee420f..b83eeb4 100644
--- a/versions/master/doxygen/base_8h.html
+++ b/versions/master/doxygen/base_8h.html
@@ -105,7 +105,7 @@ This graph shows which files directly or indirectly include this file:</div>
 <div class="dyncontent">
 <div class="center"><img alt="" border="0" src="base_8h__dep__incl.png" usemap="#include_2mxnet_2base_8hdep"/></div>
 <map id="include_2mxnet_2base_8hdep" name="include_2mxnet_2base_8hdep">
-<area alt="" coords="343,88,499,115" href="engine_8h.html" id="node2" shape="rect" title="Engine that schedules all the operations according to dependency. "/><area alt="" coords="601,171,761,197" href="ndarray_8h.html" id="node3" shape="rect" title="NDArray interface that handles array arithematics. "/><area alt="" coords="184,435,352,461" href="executor_8h.html" id="node4" shape="rect" title="Symbolic executor interface of mxnet. "/><area alt="" coords="411,246,557,287" href="op__attr_ [...]
+<area alt="" coords="485,88,640,115" href="engine_8h.html" id="node2" shape="rect" title="Engine that schedules all the operations according to dependency. "/><area alt="" coords="719,171,880,197" href="ndarray_8h.html" id="node3" shape="rect" title="NDArray interface that handles array arithematics. "/><area alt="" coords="211,435,379,461" href="executor_8h.html" id="node4" shape="rect" title="Symbolic executor interface of mxnet. "/><area alt="" coords="455,246,601,287" href="op__attr_ [...]
 </div>
 </div>
 <p><a href="base_8h_source.html">Go to the source code of this file.</a></p>
@@ -154,7 +154,7 @@ Macros</h2></td></tr>
 <tr class="memitem:aea13dab0ddd61ec142feb0b6759c6acf"><td align="right" class="memItemLeft" valign="top">#define </td><td class="memItemRight" valign="bottom"><a class="el" href="base_8h.html#aea13dab0ddd61ec142feb0b6759c6acf">MXNET_MINOR</a>   0</td></tr>
 <tr class="memdesc:aea13dab0ddd61ec142feb0b6759c6acf"><td class="mdescLeft"> </td><td class="mdescRight">minor version  <a href="#aea13dab0ddd61ec142feb0b6759c6acf">More...</a><br/></td></tr>
 <tr class="separator:aea13dab0ddd61ec142feb0b6759c6acf"><td class="memSeparator" colspan="2"> </td></tr>
-<tr class="memitem:a76b95738a0fa0478cd508f324d2ac49d"><td align="right" class="memItemLeft" valign="top">#define </td><td class="memItemRight" valign="bottom"><a class="el" href="base_8h.html#a76b95738a0fa0478cd508f324d2ac49d">MXNET_PATCH</a>   0</td></tr>
+<tr class="memitem:a76b95738a0fa0478cd508f324d2ac49d"><td align="right" class="memItemLeft" valign="top">#define </td><td class="memItemRight" valign="bottom"><a class="el" href="base_8h.html#a76b95738a0fa0478cd508f324d2ac49d">MXNET_PATCH</a>   1</td></tr>
 <tr class="memdesc:a76b95738a0fa0478cd508f324d2ac49d"><td class="mdescLeft"> </td><td class="mdescRight">patch version  <a href="#a76b95738a0fa0478cd508f324d2ac49d">More...</a><br/></td></tr>
 <tr class="separator:a76b95738a0fa0478cd508f324d2ac49d"><td class="memSeparator" colspan="2"> </td></tr>
 <tr class="memitem:a18d0da52a992a3360eaa481b43094e80"><td align="right" class="memItemLeft" valign="top">#define </td><td class="memItemRight" valign="bottom"><a class="el" href="base_8h.html#a18d0da52a992a3360eaa481b43094e80">MXNET_VERSION</a>   (<a class="el" href="base_8h.html#aee5302444d57230419d0cd77bf72770d">MXNET_MAJOR</a>*10000 + <a class="el" href="base_8h.html#aea13dab0ddd61ec142feb0b6759c6acf">MXNET_MINOR</a>*100 + <a class="el" href="base_8h.html#a76b95738a0fa0478cd508f324d2a [...]
@@ -278,7 +278,7 @@ Typedefs</h2></td></tr>
 <div class="memproto">
 <table class="memname">
 <tr>
-<td class="memname">#define MXNET_PATCH   0</td>
+<td class="memname">#define MXNET_PATCH   1</td>
 </tr>
 </table>
 </div><div class="memdoc">
@@ -377,7 +377,7 @@ Typedefs</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/base_8h__dep__incl.map b/versions/master/doxygen/base_8h__dep__incl.map
index c5edd8b..b46416c 100644
--- a/versions/master/doxygen/base_8h__dep__incl.map
+++ b/versions/master/doxygen/base_8h__dep__incl.map
@@ -1,18 +1,19 @@
 <map id="include/mxnet/base.h" name="include/mxnet/base.h">
-<area shape="rect" id="node2" href="$engine_8h.html" title="Engine that schedules all the operations according to dependency. " alt="" coords="343,88,499,115"/>
-<area shape="rect" id="node3" href="$ndarray_8h.html" title="NDArray interface that handles array arithematics. " alt="" coords="601,171,761,197"/>
-<area shape="rect" id="node4" href="$executor_8h.html" title="Symbolic executor interface of mxnet. " alt="" coords="184,435,352,461"/>
-<area shape="rect" id="node5" href="$op__attr__types_8h.html" title="Additional operator attributes beside the ones provided by NNVM. " alt="" coords="411,246,557,287"/>
-<area shape="rect" id="node6" href="$operator_8h.html" title="Operator interface of mxnet. " alt="" coords="171,344,335,371"/>
+<area shape="rect" id="node2" href="$engine_8h.html" title="Engine that schedules all the operations according to dependency. " alt="" coords="485,88,640,115"/>
+<area shape="rect" id="node3" href="$ndarray_8h.html" title="NDArray interface that handles array arithematics. " alt="" coords="719,171,880,197"/>
+<area shape="rect" id="node4" href="$executor_8h.html" title="Symbolic executor interface of mxnet. " alt="" coords="211,435,379,461"/>
+<area shape="rect" id="node5" href="$op__attr__types_8h.html" title="Additional operator attributes beside the ones provided by NNVM. " alt="" coords="455,246,601,287"/>
+<area shape="rect" id="node6" href="$operator_8h.html" title="Operator interface of mxnet. " alt="" coords="212,344,376,371"/>
 <area shape="rect" id="node7" href="$operator__util_8h.html" title="Utility functions and registries to help quickly build new operators. [Deprecated] Use the register f..." alt="" coords="5,427,159,469"/>
-<area shape="rect" id="node12" href="$io_8h.html" title="mxnet io data structure and data iterator " alt="" coords="920,253,1048,280"/>
-<area shape="rect" id="node14" href="$rtc_8h.html" title="include/mxnet/rtc.h" alt="" coords="1072,253,1205,280"/>
-<area shape="rect" id="node15" href="$resource_8h.html" title="Global resource allocation handling. " alt="" coords="221,171,389,197"/>
-<area shape="rect" id="node16" href="$storage_8h.html" title="Storage manager across multiple devices. " alt="" coords="575,88,736,115"/>
-<area shape="rect" id="node17" href="$tensor__blob_8h.html" title="TBlob class that holds common representation of arbirary dimension tensor, can be used to transformed..." alt="" coords="914,81,1056,122"/>
-<area shape="rect" id="node10" href="$utils_8h.html" title="Basic utilility functions. " alt="" coords="493,435,627,461"/>
-<area shape="rect" id="node9" href="$imperative_8h.html" title="include/mxnet/imperative.h" alt="" coords="743,435,921,461"/>
-<area shape="rect" id="node13" href="$kvstore_8h.html" title="key&#45;value store interface for mxnet " alt="" coords="683,253,844,280"/>
-<area shape="rect" id="node8" href="$graph__attr__types_8h.html" title="Data structures that can appear in graph attributes. " alt="" coords="656,337,795,378"/>
-<area shape="rect" id="node11" href="$exec__utils_8h.html" title="Common utility functions for executors. " alt="" coords="476,517,644,544"/>
+<area shape="rect" id="node12" href="$io_8h.html" title="mxnet io data structure and data iterator " alt="" coords="964,253,1092,280"/>
+<area shape="rect" id="node14" href="$rtc_8h.html" title="include/mxnet/rtc.h" alt="" coords="1116,253,1249,280"/>
+<area shape="rect" id="node15" href="$resource_8h.html" title="Global resource allocation handling. " alt="" coords="217,171,385,197"/>
+<area shape="rect" id="node16" href="$storage_8h.html" title="Storage manager across multiple devices. " alt="" coords="767,88,928,115"/>
+<area shape="rect" id="node17" href="$random__generator_8h.html" title="Parallel random number generator. " alt="" coords="196,88,409,115"/>
+<area shape="rect" id="node18" href="$tensor__blob_8h.html" title="TBlob class that holds common representation of arbirary dimension tensor, can be used to transformed..." alt="" coords="1055,81,1197,122"/>
+<area shape="rect" id="node10" href="$utils_8h.html" title="Basic utilility functions. " alt="" coords="537,435,671,461"/>
+<area shape="rect" id="node9" href="$imperative_8h.html" title="include/mxnet/imperative.h" alt="" coords="787,435,965,461"/>
+<area shape="rect" id="node13" href="$kvstore_8h.html" title="key&#45;value store interface for mxnet " alt="" coords="727,253,888,280"/>
+<area shape="rect" id="node8" href="$graph__attr__types_8h.html" title="Data structures that can appear in graph attributes. " alt="" coords="700,337,839,378"/>
+<area shape="rect" id="node11" href="$exec__utils_8h.html" title="Common utility functions for executors. " alt="" coords="520,517,688,544"/>
 </map>
diff --git a/versions/master/doxygen/base_8h__dep__incl.md5 b/versions/master/doxygen/base_8h__dep__incl.md5
index e9ce1e3..e2f19e3 100644
--- a/versions/master/doxygen/base_8h__dep__incl.md5
+++ b/versions/master/doxygen/base_8h__dep__incl.md5
@@ -1 +1 @@
-273723c23be254627459033e3e0b7ac2
\ No newline at end of file
+c847f6a0336c0b800cabb9cdd7cc2148
\ No newline at end of file
diff --git a/versions/master/doxygen/base_8h__dep__incl.png b/versions/master/doxygen/base_8h__dep__incl.png
index 237dbeb..693f538 100644
Binary files a/versions/master/doxygen/base_8h__dep__incl.png and b/versions/master/doxygen/base_8h__dep__incl.png differ
diff --git a/versions/master/doxygen/base_8h_source.html b/versions/master/doxygen/base_8h_source.html
index 3079638..9a9dc4b 100644
--- a/versions/master/doxygen/base_8h_source.html
+++ b/versions/master/doxygen/base_8h_source.html
@@ -160,7 +160,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="line"><a name="l00113"></a><span class="lineno">  113</span> <span class="preprocessor"></span></div>
 <div class="line"><a name="l00114"></a><span class="lineno"><a class="line" href="base_8h.html#aea13dab0ddd61ec142feb0b6759c6acf">  114</a></span> <span class="preprocessor">#define MXNET_MINOR 0</span></div>
 <div class="line"><a name="l00115"></a><span class="lineno">  115</span> <span class="preprocessor"></span></div>
-<div class="line"><a name="l00116"></a><span class="lineno"><a class="line" href="base_8h.html#a76b95738a0fa0478cd508f324d2ac49d">  116</a></span> <span class="preprocessor">#define MXNET_PATCH 0</span></div>
+<div class="line"><a name="l00116"></a><span class="lineno"><a class="line" href="base_8h.html#a76b95738a0fa0478cd508f324d2ac49d">  116</a></span> <span class="preprocessor">#define MXNET_PATCH 1</span></div>
 <div class="line"><a name="l00117"></a><span class="lineno">  117</span> <span class="preprocessor"></span></div>
 <div class="line"><a name="l00118"></a><span class="lineno"><a class="line" href="base_8h.html#a18d0da52a992a3360eaa481b43094e80">  118</a></span> <span class="preprocessor">#define MXNET_VERSION (MXNET_MAJOR*10000 + MXNET_MINOR*100 + MXNET_PATCH)</span></div>
 <div class="line"><a name="l00119"></a><span class="lineno">  119</span> <span class="preprocessor"></span></div>
@@ -367,7 +367,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </div><!-- fragment --></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
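
As a side note on the hunk above: beyond the regenerated footer dates, the change to base.h is the MXNET_PATCH bump from 0 to 1. A minimal sketch of what that means for the packed MXNET_VERSION macro follows; the MXNET_MAJOR value of 1 is an assumption (it does not appear in this hunk), and the snippet is illustrative only, not part of the generated pages.

#include <stdio.h>

/* Version macros as shown in the diffed base.h source view; MXNET_MAJOR is assumed to be 1. */
#define MXNET_MAJOR 1
#define MXNET_MINOR 0
#define MXNET_PATCH 1
#define MXNET_VERSION (MXNET_MAJOR*10000 + MXNET_MINOR*100 + MXNET_PATCH)

int main(void) {
  /* With the patch bump from 0 to 1, the packed version goes from 10000 to 10001. */
  printf("MXNET_VERSION = %d\n", MXNET_VERSION);
  return 0;
}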
diff --git a/versions/master/doxygen/c__api_8h.html b/versions/master/doxygen/c__api_8h.html
index 0d554dd..ba8b041 100644
--- a/versions/master/doxygen/c__api_8h.html
+++ b/versions/master/doxygen/c__api_8h.html
@@ -482,6 +482,9 @@ Functions</h2></td></tr>
 <tr class="memitem:aedf1da726aaacb87a5d959ed83f03f41"><td align="right" class="memItemLeft" valign="top"><a class="el" href="c__predict__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> int </td><td class="memItemRight" valign="bottom"><a class="el" href="c__api_8h.html#aedf1da726aaacb87a5d959ed83f03f41">MXSymbolListOutputs</a> (<a class="el" href="c__api_8h.html#a301e9a7722b326126defdd39ccd99b37">SymbolHandle</a> symbol, <a class="el" href="c__api_8h.html#a796e7ef8260ef40b24 [...]
 <tr class="memdesc:aedf1da726aaacb87a5d959ed83f03f41"><td class="mdescLeft"> </td><td class="mdescRight">List returns in the symbol.  <a href="#aedf1da726aaacb87a5d959ed83f03f41">More...</a><br/></td></tr>
 <tr class="separator:aedf1da726aaacb87a5d959ed83f03f41"><td class="memSeparator" colspan="2"> </td></tr>
+<tr class="memitem:a970b94e3dfb72a2a76065149180c4f1f"><td align="right" class="memItemLeft" valign="top"><a class="el" href="c__predict__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> int </td><td class="memItemRight" valign="bottom"><a class="el" href="c__api_8h.html#a970b94e3dfb72a2a76065149180c4f1f">MXSymbolGetNumOutputs</a> (<a class="el" href="c__api_8h.html#a301e9a7722b326126defdd39ccd99b37">SymbolHandle</a> symbol, <a class="el" href="c__api_8h.html#a796e7ef8260ef40b [...]
+<tr class="memdesc:a970b94e3dfb72a2a76065149180c4f1f"><td class="mdescLeft"> </td><td class="mdescRight">Get number of outputs of the symbol.  <a href="#a970b94e3dfb72a2a76065149180c4f1f">More...</a><br/></td></tr>
+<tr class="separator:a970b94e3dfb72a2a76065149180c4f1f"><td class="memSeparator" colspan="2"> </td></tr>
 <tr class="memitem:a13816872b73d6c474686d985747e8c65"><td align="right" class="memItemLeft" valign="top"><a class="el" href="c__predict__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> int </td><td class="memItemRight" valign="bottom"><a class="el" href="c__api_8h.html#a13816872b73d6c474686d985747e8c65">MXSymbolGetInternals</a> (<a class="el" href="c__api_8h.html#a301e9a7722b326126defdd39ccd99b37">SymbolHandle</a> symbol, <a class="el" href="c__api_8h.html#a301e9a7722b326126 [...]
 <tr class="memdesc:a13816872b73d6c474686d985747e8c65"><td class="mdescLeft"> </td><td class="mdescRight">Get a symbol that contains all the internals.  <a href="#a13816872b73d6c474686d985747e8c65">More...</a><br/></td></tr>
 <tr class="separator:a13816872b73d6c474686d985747e8c65"><td class="memSeparator" colspan="2"> </td></tr>
@@ -7315,6 +7318,40 @@ Functions</h2></td></tr>
 <dl class="section return"><dt>Returns</dt><dd>0 when success, -1 when failure happens </dd></dl>
 </div>
 </div>
+<a class="anchor" id="a970b94e3dfb72a2a76065149180c4f1f"></a>
+<div class="memitem">
+<div class="memproto">
+<table class="memname">
+<tr>
+<td class="memname"><a class="el" href="c__predict__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> int MXSymbolGetNumOutputs </td>
+<td>(</td>
+<td class="paramtype"><a class="el" href="c__api_8h.html#a301e9a7722b326126defdd39ccd99b37">SymbolHandle</a> </td>
+<td class="paramname"><em>symbol</em>, </td>
+</tr>
+<tr>
+<td class="paramkey"></td>
+<td></td>
+<td class="paramtype"><a class="el" href="c__api_8h.html#a796e7ef8260ef40b24af7d1082aa2d84">mx_uint</a> * </td>
+<td class="paramname"><em>output_count</em> </td>
+</tr>
+<tr>
+<td></td>
+<td>)</td>
+<td></td><td></td>
+</tr>
+</table>
+</div><div class="memdoc">
+<p>Get number of outputs of the symbol. </p>
+<dl class="params"><dt>Parameters</dt><dd>
+<table class="params">
+<tr><td class="paramname">symbol</td><td>The symbol </td></tr>
+<tr><td class="paramname">out_size</td><td>number of outputs </td></tr>
+</table>
+</dd>
+</dl>
+<dl class="section return"><dt>Returns</dt><dd>0 when success, -1 when failure happens </dd></dl>
+</div>
+</div>
 <a class="anchor" id="a8e601320c59679d9423179f23f281bf2"></a>
 <div class="memitem">
 <div class="memproto">
@@ -8146,7 +8183,7 @@ Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
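
The c_api.h page diffed above newly documents MXSymbolGetNumOutputs. Below is a minimal calling sketch, assuming an MXNet build that includes this commit; it uses only C API entry points already present in the header (MXSymbolCreateVariable, MXSymbolFree, MXGetLastError) alongside the new call, with minimal error handling. It is an illustration, not part of the generated site.

#include <stdio.h>
#include <mxnet/c_api.h>

int main(void) {
  SymbolHandle sym = NULL;
  mx_uint num_outputs = 0;

  /* A bare variable symbol is the simplest handle to query. */
  if (MXSymbolCreateVariable("data", &sym) != 0) {
    fprintf(stderr, "create failed: %s\n", MXGetLastError());
    return 1;
  }
  /* Newly documented in this build: returns the number of outputs of the symbol. */
  if (MXSymbolGetNumOutputs(sym, &num_outputs) != 0) {
    fprintf(stderr, "query failed: %s\n", MXGetLastError());
    return 1;
  }
  printf("number of outputs: %u\n", (unsigned)num_outputs);
  MXSymbolFree(sym);
  return 0;
}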
diff --git a/versions/master/doxygen/c__api_8h_source.html b/versions/master/doxygen/c__api_8h_source.html
index 7841426..db93d4f 100644
--- a/versions/master/doxygen/c__api_8h_source.html
+++ b/versions/master/doxygen/c__api_8h_source.html
@@ -516,431 +516,436 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="line"><a name="l01051"></a><span class="lineno"> 1051</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aedf1da726aaacb87a5d959ed83f03f41">MXSymbolListOutputs</a>(SymbolHandle symbol,</div>
 <div class="line"><a name="l01052"></a><span class="lineno"> 1052</span>                                   mx_uint *out_size,</div>
 <div class="line"><a name="l01053"></a><span class="lineno"> 1053</span>                                   <span class="keyword">const</span> <span class="keywordtype">char</span> ***out_str_array);</div>
-<div class="line"><a name="l01060"></a><span class="lineno"> 1060</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a13816872b73d6c474686d985747e8c65">MXSymbolGetInternals</a>(SymbolHandle symbol,</div>
-<div class="line"><a name="l01061"></a><span class="lineno"> 1061</span>                                    SymbolHandle *out);</div>
-<div class="line"><a name="l01068"></a><span class="lineno"> 1068</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a6050b4cfcb75ed3d0a44afb1b8df45cd">MXSymbolGetChildren</a>(SymbolHandle symbol,</div>
-<div class="line"><a name="l01069"></a><span class="lineno"> 1069</span>                                   SymbolHandle *out);</div>
-<div class="line"><a name="l01077"></a><span class="lineno"> 1077</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a8e601320c59679d9423179f23f281bf2">MXSymbolGetOutput</a>(SymbolHandle symbol,</div>
-<div class="line"><a name="l01078"></a><span class="lineno"> 1078</span>                                 mx_uint index,</div>
-<div class="line"><a name="l01079"></a><span class="lineno"> 1079</span>                                 SymbolHandle *out);</div>
-<div class="line"><a name="l01087"></a><span class="lineno"> 1087</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a38e75c5a41473912e2d6cdfececba84d">MXSymbolListAuxiliaryStates</a>(SymbolHandle symbol,</div>
-<div class="line"><a name="l01088"></a><span class="lineno"> 1088</span>                                           mx_uint *out_size,</div>
-<div class="line"><a name="l01089"></a><span class="lineno"> 1089</span>                                           <span class="keyword">const</span> <span class="keywordtype">char</span> ***out_str_array);</div>
-<div class="line"><a name="l01104"></a><span class="lineno"> 1104</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a879a5ee991393d364b787e948b4c9961">MXSymbolCompose</a>(SymbolHandle sym,</div>
-<div class="line"><a name="l01105"></a><span class="lineno"> 1105</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span> *name,</div>
-<div class="line"><a name="l01106"></a><span class="lineno"> 1106</span>                               mx_uint num_args,</div>
-<div class="line"><a name="l01107"></a><span class="lineno"> 1107</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01108"></a><span class="lineno"> 1108</span>                               SymbolHandle* args);</div>
-<div class="line"><a name="l01118"></a><span class="lineno"> 1118</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ac2268c0bc7ad2f96d36bbbdab9a51c2e">MXSymbolGrad</a>(SymbolHandle sym,</div>
-<div class="line"><a name="l01119"></a><span class="lineno"> 1119</span>                            mx_uint num_wrt,</div>
-<div class="line"><a name="l01120"></a><span class="lineno"> 1120</span>                            <span class="keyword">const</span> <span class="keywordtype">char</span>** wrt,</div>
-<div class="line"><a name="l01121"></a><span class="lineno"> 1121</span>                            SymbolHandle* out);</div>
-<div class="line"><a name="l01144"></a><span class="lineno"> 1144</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#afaf03324563098f1ac4c23ecfc88b784">MXSymbolInferShape</a>(SymbolHandle sym,</div>
-<div class="line"><a name="l01145"></a><span class="lineno"> 1145</span>                                  mx_uint num_args,</div>
-<div class="line"><a name="l01146"></a><span class="lineno"> 1146</span>                                  <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01147"></a><span class="lineno"> 1147</span>                                  <span class="keyword">const</span> mx_uint *arg_ind_ptr,</div>
-<div class="line"><a name="l01148"></a><span class="lineno"> 1148</span>                                  <span class="keyword">const</span> mx_uint *arg_shape_data,</div>
-<div class="line"><a name="l01149"></a><span class="lineno"> 1149</span>                                  mx_uint *in_shape_size,</div>
-<div class="line"><a name="l01150"></a><span class="lineno"> 1150</span>                                  <span class="keyword">const</span> mx_uint **in_shape_ndim,</div>
-<div class="line"><a name="l01151"></a><span class="lineno"> 1151</span>                                  <span class="keyword">const</span> mx_uint ***in_shape_data,</div>
-<div class="line"><a name="l01152"></a><span class="lineno"> 1152</span>                                  mx_uint *out_shape_size,</div>
-<div class="line"><a name="l01153"></a><span class="lineno"> 1153</span>                                  <span class="keyword">const</span> mx_uint **out_shape_ndim,</div>
-<div class="line"><a name="l01154"></a><span class="lineno"> 1154</span>                                  <span class="keyword">const</span> mx_uint ***out_shape_data,</div>
-<div class="line"><a name="l01155"></a><span class="lineno"> 1155</span>                                  mx_uint *aux_shape_size,</div>
-<div class="line"><a name="l01156"></a><span class="lineno"> 1156</span>                                  <span class="keyword">const</span> mx_uint **aux_shape_ndim,</div>
-<div class="line"><a name="l01157"></a><span class="lineno"> 1157</span>                                  <span class="keyword">const</span> mx_uint ***aux_shape_data,</div>
-<div class="line"><a name="l01158"></a><span class="lineno"> 1158</span>                                  <span class="keywordtype">int</span> *complete);</div>
-<div class="line"><a name="l01183"></a><span class="lineno"> 1183</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a664c85b47c69b81be3b9ea1dfcb72a94">MXSymbolInferShapePartial</a>(SymbolHandle sym,</div>
-<div class="line"><a name="l01184"></a><span class="lineno"> 1184</span>                                         mx_uint num_args,</div>
-<div class="line"><a name="l01185"></a><span class="lineno"> 1185</span>                                         <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01186"></a><span class="lineno"> 1186</span>                                         <span class="keyword">const</span> mx_uint *arg_ind_ptr,</div>
-<div class="line"><a name="l01187"></a><span class="lineno"> 1187</span>                                         <span class="keyword">const</span> mx_uint *arg_shape_data,</div>
-<div class="line"><a name="l01188"></a><span class="lineno"> 1188</span>                                         mx_uint *in_shape_size,</div>
-<div class="line"><a name="l01189"></a><span class="lineno"> 1189</span>                                         <span class="keyword">const</span> mx_uint **in_shape_ndim,</div>
-<div class="line"><a name="l01190"></a><span class="lineno"> 1190</span>                                         <span class="keyword">const</span> mx_uint ***in_shape_data,</div>
-<div class="line"><a name="l01191"></a><span class="lineno"> 1191</span>                                         mx_uint *out_shape_size,</div>
-<div class="line"><a name="l01192"></a><span class="lineno"> 1192</span>                                         <span class="keyword">const</span> mx_uint **out_shape_ndim,</div>
-<div class="line"><a name="l01193"></a><span class="lineno"> 1193</span>                                         <span class="keyword">const</span> mx_uint ***out_shape_data,</div>
-<div class="line"><a name="l01194"></a><span class="lineno"> 1194</span>                                         mx_uint *aux_shape_size,</div>
-<div class="line"><a name="l01195"></a><span class="lineno"> 1195</span>                                         <span class="keyword">const</span> mx_uint **aux_shape_ndim,</div>
-<div class="line"><a name="l01196"></a><span class="lineno"> 1196</span>                                         <span class="keyword">const</span> mx_uint ***aux_shape_data,</div>
-<div class="line"><a name="l01197"></a><span class="lineno"> 1197</span>                                         <span class="keywordtype">int</span> *complete);</div>
-<div class="line"><a name="l01198"></a><span class="lineno"> 1198</span> </div>
-<div class="line"><a name="l01217"></a><span class="lineno"> 1217</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1eff7ea3fd2fe56509c75f5a610890e6">MXSymbolInferType</a>(SymbolHandle sym,</div>
-<div class="line"><a name="l01218"></a><span class="lineno"> 1218</span>                                 mx_uint num_args,</div>
-<div class="line"><a name="l01219"></a><span class="lineno"> 1219</span>                                 <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01220"></a><span class="lineno"> 1220</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> *arg_type_data,</div>
-<div class="line"><a name="l01221"></a><span class="lineno"> 1221</span>                                 mx_uint *in_type_size,</div>
-<div class="line"><a name="l01222"></a><span class="lineno"> 1222</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> **in_type_data,</div>
-<div class="line"><a name="l01223"></a><span class="lineno"> 1223</span>                                 mx_uint *out_type_size,</div>
-<div class="line"><a name="l01224"></a><span class="lineno"> 1224</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> **out_type_data,</div>
-<div class="line"><a name="l01225"></a><span class="lineno"> 1225</span>                                 mx_uint *aux_type_size,</div>
-<div class="line"><a name="l01226"></a><span class="lineno"> 1226</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> **aux_type_data,</div>
-<div class="line"><a name="l01227"></a><span class="lineno"> 1227</span>                                 <span class="keywordtype">int</span> *complete);</div>
-<div class="line"><a name="l01228"></a><span class="lineno"> 1228</span> </div>
-<div class="line"><a name="l01229"></a><span class="lineno"> 1229</span> </div>
-<div class="line"><a name="l01230"></a><span class="lineno"> 1230</span> </div>
-<div class="line"><a name="l01231"></a><span class="lineno"> 1231</span> </div>
-<div class="line"><a name="l01232"></a><span class="lineno"> 1232</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01233"></a><span class="lineno"> 1233</span> <span class="comment">// Part 4: Executor interface</span></div>
-<div class="line"><a name="l01234"></a><span class="lineno"> 1234</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01240"></a><span class="lineno"> 1240</span> <span class="comment"></span><a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ade1487656dfba3cae12c5bf676e08dcf">MXExecutorFree</a>(ExecutorHandle handle);</div>
-<div class="line"><a name="l01247"></a><span class="lineno"> 1247</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ac9f776a6e7be0bacda4dcd3efa5f7dc5">MXExecutorPrint</a>(ExecutorHandle handle, <span class="keyword">const</span> <span class="keywordtype">char</span> **out_str);</div>
-<div class="line"><a name="l01255"></a><span class="lineno"> 1255</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1f9987e701b810e0e79b5207fa215415">MXExecutorForward</a>(ExecutorHandle handle, <span class="keywordtype">int</span> is_train);</div>
-<div class="line"><a name="l01265"></a><span class="lineno"> 1265</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ad8f92bb15e11058abe12959cfc4dcbb3">MXExecutorBackward</a>(ExecutorHandle handle,</div>
-<div class="line"><a name="l01266"></a><span class="lineno"> 1266</span>                                  mx_uint len,</div>
-<div class="line"><a name="l01267"></a><span class="lineno"> 1267</span>                                  NDArrayHandle *head_grads);</div>
-<div class="line"><a name="l01278"></a><span class="lineno"> 1278</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a3a211f3e0c6270271ce4fe79228dd0c2">MXExecutorBackwardEx</a>(ExecutorHandle handle,</div>
-<div class="line"><a name="l01279"></a><span class="lineno"> 1279</span>                                    mx_uint len,</div>
-<div class="line"><a name="l01280"></a><span class="lineno"> 1280</span>                                    NDArrayHandle *head_grads,</div>
-<div class="line"><a name="l01281"></a><span class="lineno"> 1281</span>                                    <span class="keywordtype">int</span> is_train);</div>
-<div class="line"><a name="l01290"></a><span class="lineno"> 1290</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a442845c8ef89d4c849f468ea2b7717f7">MXExecutorOutputs</a>(ExecutorHandle handle,</div>
-<div class="line"><a name="l01291"></a><span class="lineno"> 1291</span>                                 mx_uint *out_size,</div>
-<div class="line"><a name="l01292"></a><span class="lineno"> 1292</span>                                 NDArrayHandle **out);</div>
-<div class="line"><a name="l01293"></a><span class="lineno"> 1293</span> </div>
-<div class="line"><a name="l01309"></a><span class="lineno"> 1309</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a10a6deacde18b7d52b44eee0ddddf0e1">MXExecutorBind</a>(SymbolHandle symbol_handle,</div>
-<div class="line"><a name="l01310"></a><span class="lineno"> 1310</span>                              <span class="keywordtype">int</span> dev_type,</div>
-<div class="line"><a name="l01311"></a><span class="lineno"> 1311</span>                              <span class="keywordtype">int</span> dev_id,</div>
-<div class="line"><a name="l01312"></a><span class="lineno"> 1312</span>                              mx_uint len,</div>
-<div class="line"><a name="l01313"></a><span class="lineno"> 1313</span>                              NDArrayHandle *in_args,</div>
-<div class="line"><a name="l01314"></a><span class="lineno"> 1314</span>                              NDArrayHandle *arg_grad_store,</div>
-<div class="line"><a name="l01315"></a><span class="lineno"> 1315</span>                              mx_uint *grad_req_type,</div>
-<div class="line"><a name="l01316"></a><span class="lineno"> 1316</span>                              mx_uint aux_states_len,</div>
-<div class="line"><a name="l01317"></a><span class="lineno"> 1317</span>                              NDArrayHandle *aux_states,</div>
-<div class="line"><a name="l01318"></a><span class="lineno"> 1318</span>                              ExecutorHandle *out);</div>
-<div class="line"><a name="l01340"></a><span class="lineno"> 1340</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ad1646370458f8a3ddb9e4f3365c5e510">MXExecutorBindX</a>(SymbolHandle symbol_handle,</div>
-<div class="line"><a name="l01341"></a><span class="lineno"> 1341</span>                               <span class="keywordtype">int</span> dev_type,</div>
-<div class="line"><a name="l01342"></a><span class="lineno"> 1342</span>                               <span class="keywordtype">int</span> dev_id,</div>
-<div class="line"><a name="l01343"></a><span class="lineno"> 1343</span>                               mx_uint num_map_keys,</div>
-<div class="line"><a name="l01344"></a><span class="lineno"> 1344</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** map_keys,</div>
-<div class="line"><a name="l01345"></a><span class="lineno"> 1345</span>                               <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_types,</div>
-<div class="line"><a name="l01346"></a><span class="lineno"> 1346</span>                               <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_ids,</div>
-<div class="line"><a name="l01347"></a><span class="lineno"> 1347</span>                               mx_uint len,</div>
-<div class="line"><a name="l01348"></a><span class="lineno"> 1348</span>                               NDArrayHandle *in_args,</div>
-<div class="line"><a name="l01349"></a><span class="lineno"> 1349</span>                               NDArrayHandle *arg_grad_store,</div>
-<div class="line"><a name="l01350"></a><span class="lineno"> 1350</span>                               mx_uint *grad_req_type,</div>
-<div class="line"><a name="l01351"></a><span class="lineno"> 1351</span>                               mx_uint aux_states_len,</div>
-<div class="line"><a name="l01352"></a><span class="lineno"> 1352</span>                               NDArrayHandle *aux_states,</div>
-<div class="line"><a name="l01353"></a><span class="lineno"> 1353</span>                               ExecutorHandle *out);</div>
-<div class="line"><a name="l01376"></a><span class="lineno"> 1376</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a8139e691c88ebcf082f9e954598e0e86">MXExecutorBindEX</a>(SymbolHandle symbol_handle,</div>
-<div class="line"><a name="l01377"></a><span class="lineno"> 1377</span>                                <span class="keywordtype">int</span> dev_type,</div>
-<div class="line"><a name="l01378"></a><span class="lineno"> 1378</span>                                <span class="keywordtype">int</span> dev_id,</div>
-<div class="line"><a name="l01379"></a><span class="lineno"> 1379</span>                                mx_uint num_map_keys,</div>
-<div class="line"><a name="l01380"></a><span class="lineno"> 1380</span>                                <span class="keyword">const</span> <span class="keywordtype">char</span>** map_keys,</div>
-<div class="line"><a name="l01381"></a><span class="lineno"> 1381</span>                                <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_types,</div>
-<div class="line"><a name="l01382"></a><span class="lineno"> 1382</span>                                <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_ids,</div>
-<div class="line"><a name="l01383"></a><span class="lineno"> 1383</span>                                mx_uint len,</div>
-<div class="line"><a name="l01384"></a><span class="lineno"> 1384</span>                                NDArrayHandle *in_args,</div>
-<div class="line"><a name="l01385"></a><span class="lineno"> 1385</span>                                NDArrayHandle *arg_grad_store,</div>
-<div class="line"><a name="l01386"></a><span class="lineno"> 1386</span>                                mx_uint *grad_req_type,</div>
-<div class="line"><a name="l01387"></a><span class="lineno"> 1387</span>                                mx_uint aux_states_len,</div>
-<div class="line"><a name="l01388"></a><span class="lineno"> 1388</span>                                NDArrayHandle *aux_states,</div>
-<div class="line"><a name="l01389"></a><span class="lineno"> 1389</span>                                ExecutorHandle shared_exec,</div>
-<div class="line"><a name="l01390"></a><span class="lineno"> 1390</span>                                ExecutorHandle *out);</div>
-<div class="line"><a name="l01391"></a><span class="lineno"> 1391</span> </div>
-<div class="line"><a name="l01392"></a><span class="lineno"> 1392</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ae8bc5c6380c5c7aaa6c8c6b884482e29">MXExecutorSimpleBind</a>(SymbolHandle symbol_handle,</div>
-<div class="line"><a name="l01393"></a><span class="lineno"> 1393</span>                                    <span class="keywordtype">int</span> dev_type,</div>
-<div class="line"><a name="l01394"></a><span class="lineno"> 1394</span>                                    <span class="keywordtype">int</span> dev_id,</div>
-<div class="line"><a name="l01395"></a><span class="lineno"> 1395</span>                                    <span class="keyword">const</span> mx_uint num_g2c_keys,</div>
-<div class="line"><a name="l01396"></a><span class="lineno"> 1396</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** g2c_keys,</div>
-<div class="line"><a name="l01397"></a><span class="lineno"> 1397</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* g2c_dev_types,</div>
-<div class="line"><a name="l01398"></a><span class="lineno"> 1398</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* g2c_dev_ids,</div>
-<div class="line"><a name="l01399"></a><span class="lineno"> 1399</span>                                    <span class="keyword">const</span> mx_uint provided_grad_req_list_len,</div>
-<div class="line"><a name="l01400"></a><span class="lineno"> 1400</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_grad_req_names,</div>
-<div class="line"><a name="l01401"></a><span class="lineno"> 1401</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_grad_req_types,</div>
-<div class="line"><a name="l01402"></a><span class="lineno"> 1402</span>                                    <span class="keyword">const</span> mx_uint num_provided_arg_shapes,</div>
-<div class="line"><a name="l01403"></a><span class="lineno"> 1403</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_arg_shape_names,</div>
-<div class="line"><a name="l01404"></a><span class="lineno"> 1404</span>                                    <span class="keyword">const</span> mx_uint* provided_arg_shape_data,</div>
-<div class="line"><a name="l01405"></a><span class="lineno"> 1405</span>                                    <span class="keyword">const</span> mx_uint* provided_arg_shape_idx,</div>
-<div class="line"><a name="l01406"></a><span class="lineno"> 1406</span>                                    <span class="keyword">const</span> mx_uint num_provided_arg_dtypes,</div>
-<div class="line"><a name="l01407"></a><span class="lineno"> 1407</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_arg_dtype_names,</div>
-<div class="line"><a name="l01408"></a><span class="lineno"> 1408</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* provided_arg_dtypes,</div>
-<div class="line"><a name="l01409"></a><span class="lineno"> 1409</span>                                    <span class="keyword">const</span> mx_uint num_provided_arg_stypes,</div>
-<div class="line"><a name="l01410"></a><span class="lineno"> 1410</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_arg_stype_names,</div>
-<div class="line"><a name="l01411"></a><span class="lineno"> 1411</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* provided_arg_stypes,</div>
-<div class="line"><a name="l01412"></a><span class="lineno"> 1412</span>                                    <span class="keyword">const</span> mx_uint num_shared_arg_names,</div>
-<div class="line"><a name="l01413"></a><span class="lineno"> 1413</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** shared_arg_name_list,</div>
-<div class="line"><a name="l01414"></a><span class="lineno"> 1414</span>                                    <span class="keywordtype">int</span>* shared_buffer_len,</div>
-<div class="line"><a name="l01415"></a><span class="lineno"> 1415</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** shared_buffer_name_list,</div>
-<div class="line"><a name="l01416"></a><span class="lineno"> 1416</span>                                    NDArrayHandle* shared_buffer_handle_list,</div>
-<div class="line"><a name="l01417"></a><span class="lineno"> 1417</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>*** updated_shared_buffer_name_list,</div>
-<div class="line"><a name="l01418"></a><span class="lineno"> 1418</span>                                    NDArrayHandle** updated_shared_buffer_handle_list,</div>
-<div class="line"><a name="l01419"></a><span class="lineno"> 1419</span>                                    mx_uint* num_in_args,</div>
-<div class="line"><a name="l01420"></a><span class="lineno"> 1420</span>                                    NDArrayHandle** in_args,</div>
-<div class="line"><a name="l01421"></a><span class="lineno"> 1421</span>                                    NDArrayHandle** arg_grads,</div>
-<div class="line"><a name="l01422"></a><span class="lineno"> 1422</span>                                    mx_uint* num_aux_states,</div>
-<div class="line"><a name="l01423"></a><span class="lineno"> 1423</span>                                    NDArrayHandle** aux_states,</div>
-<div class="line"><a name="l01424"></a><span class="lineno"> 1424</span>                                    ExecutorHandle shared_exec_handle,</div>
-<div class="line"><a name="l01425"></a><span class="lineno"> 1425</span>                                    ExecutorHandle* out);</div>
-<div class="line"><a name="l01429"></a><span class="lineno"> 1429</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#abae91a5d7847020e14395e078144489a">MXExecutorSetMonitorCallback</a>(ExecutorHandle handle,</div>
-<div class="line"><a name="l01430"></a><span class="lineno"> 1430</span>                                            <a class="code" href="c__api_8h.html#aa7071c027141653df82d2b72db566acd">ExecutorMonitorCallback</a> callback,</div>
-<div class="line"><a name="l01431"></a><span class="lineno"> 1431</span>                                            <span class="keywordtype">void</span>* callback_handle);</div>
-<div class="line"><a name="l01432"></a><span class="lineno"> 1432</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01433"></a><span class="lineno"> 1433</span> <span class="comment">// Part 5: IO Interface</span></div>
-<div class="line"><a name="l01434"></a><span class="lineno"> 1434</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01441"></a><span class="lineno"> 1441</span> <span class="comment"></span><a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa13fb35019b0c42dcfee185b2f9e09d0">MXListDataIters</a>(mx_uint *out_size,</div>
-<div class="line"><a name="l01442"></a><span class="lineno"> 1442</span>                               DataIterCreator **out_array);</div>
-<div class="line"><a name="l01453"></a><span class="lineno"> 1453</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a361c4a5360734e430c54a754cc581354">MXDataIterCreateIter</a>(DataIterCreator handle,</div>
-<div class="line"><a name="l01454"></a><span class="lineno"> 1454</span>                                    mx_uint num_param,</div>
-<div class="line"><a name="l01455"></a><span class="lineno"> 1455</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span> **keys,</div>
-<div class="line"><a name="l01456"></a><span class="lineno"> 1456</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span> **vals,</div>
-<div class="line"><a name="l01457"></a><span class="lineno"> 1457</span>                                    DataIterHandle *out);</div>
-<div class="line"><a name="l01469"></a><span class="lineno"> 1469</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a795614eb68a4fead826202a5b5ba9517">MXDataIterGetIterInfo</a>(DataIterCreator creator,</div>
-<div class="line"><a name="l01470"></a><span class="lineno"> 1470</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> **name,</div>
-<div class="line"><a name="l01471"></a><span class="lineno"> 1471</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> **description,</div>
-<div class="line"><a name="l01472"></a><span class="lineno"> 1472</span>                                     mx_uint *num_args,</div>
-<div class="line"><a name="l01473"></a><span class="lineno"> 1473</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> ***arg_names,</div>
-<div class="line"><a name="l01474"></a><span class="lineno"> 1474</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> ***arg_type_infos,</div>
-<div class="line"><a name="l01475"></a><span class="lineno"> 1475</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> ***arg_descriptions);</div>
-<div class="line"><a name="l01481"></a><span class="lineno"> 1481</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a29e117922acfbc95828433e736bcc3af">MXDataIterFree</a>(DataIterHandle handle);</div>
-<div class="line"><a name="l01488"></a><span class="lineno"> 1488</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a8be0adeef4b87243a5eda12f17e64e67">MXDataIterNext</a>(DataIterHandle handle,</div>
-<div class="line"><a name="l01489"></a><span class="lineno"> 1489</span>                              <span class="keywordtype">int</span> *out);</div>
-<div class="line"><a name="l01495"></a><span class="lineno"> 1495</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a21df19f3242a68d1c536783fcf35968a">MXDataIterBeforeFirst</a>(DataIterHandle handle);</div>
-<div class="line"><a name="l01496"></a><span class="lineno"> 1496</span> </div>
-<div class="line"><a name="l01503"></a><span class="lineno"> 1503</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a0920d095f423b8bb34095406ea93a697">MXDataIterGetData</a>(DataIterHandle handle,</div>
-<div class="line"><a name="l01504"></a><span class="lineno"> 1504</span>                                 NDArrayHandle *out);</div>
-<div class="line"><a name="l01512"></a><span class="lineno"> 1512</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#af1b36a78dd74707c10d31c39c4d429b5">MXDataIterGetIndex</a>(DataIterHandle handle,</div>
-<div class="line"><a name="l01513"></a><span class="lineno"> 1513</span>                                  uint64_t **out_index,</div>
-<div class="line"><a name="l01514"></a><span class="lineno"> 1514</span>                                  uint64_t *out_size);</div>
-<div class="line"><a name="l01521"></a><span class="lineno"> 1521</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aec6d0d5055719576337687fc3702a899">MXDataIterGetPadNum</a>(DataIterHandle handle,</div>
-<div class="line"><a name="l01522"></a><span class="lineno"> 1522</span>                                   <span class="keywordtype">int</span> *pad);</div>
-<div class="line"><a name="l01523"></a><span class="lineno"> 1523</span> </div>
-<div class="line"><a name="l01530"></a><span class="lineno"> 1530</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a0c36af502d2a5746d841f50e1b0e71c0">MXDataIterGetLabel</a>(DataIterHandle handle,</div>
-<div class="line"><a name="l01531"></a><span class="lineno"> 1531</span>                                  NDArrayHandle *out);</div>
-<div class="line"><a name="l01532"></a><span class="lineno"> 1532</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01533"></a><span class="lineno"> 1533</span> <span class="comment">// Part 6: basic KVStore interface</span></div>
-<div class="line"><a name="l01534"></a><span class="lineno"> 1534</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01541"></a><span class="lineno"> 1541</span> <span class="comment"></span><a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a5c4a11b4fccf320d1af0f63b40fff58a">MXInitPSEnv</a>(mx_uint num_vars,</div>
-<div class="line"><a name="l01542"></a><span class="lineno"> 1542</span>                           <span class="keyword">const</span> <span class="keywordtype">char</span> **keys,</div>
-<div class="line"><a name="l01543"></a><span class="lineno"> 1543</span>                           <span class="keyword">const</span> <span class="keywordtype">char</span> **vals);</div>
-<div class="line"><a name="l01544"></a><span class="lineno"> 1544</span> </div>
-<div class="line"><a name="l01545"></a><span class="lineno"> 1545</span> </div>
-<div class="line"><a name="l01552"></a><span class="lineno"> 1552</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a2ef1a35e886c0addf294429e27ea9637">MXKVStoreCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span> *type,</div>
-<div class="line"><a name="l01553"></a><span class="lineno"> 1553</span>                               KVStoreHandle *out);</div>
-<div class="line"><a name="l01554"></a><span class="lineno"> 1554</span> </div>
-<div class="line"><a name="l01562"></a><span class="lineno"> 1562</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab9e3c15451a1bc1d89f83773ae1185a3">MXKVStoreSetGradientCompression</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01563"></a><span class="lineno"> 1563</span>                                               mx_uint num_params,</div>
-<div class="line"><a name="l01564"></a><span class="lineno"> 1564</span>                                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01565"></a><span class="lineno"> 1565</span>                                               <span class="keyword">const</span> <span class="keywordtype">char</span>** vals);</div>
-<div class="line"><a name="l01566"></a><span class="lineno"> 1566</span> </div>
-<div class="line"><a name="l01572"></a><span class="lineno"> 1572</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a915215184517bf663b0214fed32af6e7">MXKVStoreFree</a>(KVStoreHandle handle);</div>
-<div class="line"><a name="l01581"></a><span class="lineno"> 1581</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a475087b5464f9240d940ed4204a219b9">MXKVStoreInit</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01582"></a><span class="lineno"> 1582</span>                             mx_uint num,</div>
-<div class="line"><a name="l01583"></a><span class="lineno"> 1583</span>                             <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
-<div class="line"><a name="l01584"></a><span class="lineno"> 1584</span>                             NDArrayHandle* vals);</div>
-<div class="line"><a name="l01585"></a><span class="lineno"> 1585</span> </div>
-<div class="line"><a name="l01594"></a><span class="lineno"> 1594</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a4dc86980d7922f52970a8e3978bc87cb">MXKVStoreInitEx</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01595"></a><span class="lineno"> 1595</span>                               mx_uint num,</div>
-<div class="line"><a name="l01596"></a><span class="lineno"> 1596</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01597"></a><span class="lineno"> 1597</span>                               NDArrayHandle* vals);</div>
-<div class="line"><a name="l01598"></a><span class="lineno"> 1598</span> </div>
-<div class="line"><a name="l01608"></a><span class="lineno"> 1608</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a66c990fbb8e4bd320e5ad8d369155ad6">MXKVStorePush</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01609"></a><span class="lineno"> 1609</span>                             mx_uint num,</div>
-<div class="line"><a name="l01610"></a><span class="lineno"> 1610</span>                             <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
-<div class="line"><a name="l01611"></a><span class="lineno"> 1611</span>                             NDArrayHandle* vals,</div>
-<div class="line"><a name="l01612"></a><span class="lineno"> 1612</span>                             <span class="keywordtype">int</span> priority);</div>
-<div class="line"><a name="l01622"></a><span class="lineno"> 1622</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa59b7bf29d1a74fb1d1be6689c31913f">MXKVStorePushEx</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01623"></a><span class="lineno"> 1623</span>                               mx_uint num,</div>
-<div class="line"><a name="l01624"></a><span class="lineno"> 1624</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01625"></a><span class="lineno"> 1625</span>                               NDArrayHandle* vals,</div>
-<div class="line"><a name="l01626"></a><span class="lineno"> 1626</span>                               <span class="keywordtype">int</span> priority);</div>
-<div class="line"><a name="l01636"></a><span class="lineno"> 1636</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1230e78ca998f363192dc9b345a2442e">MXKVStorePull</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01637"></a><span class="lineno"> 1637</span>                             mx_uint num,</div>
-<div class="line"><a name="l01638"></a><span class="lineno"> 1638</span>                             <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
-<div class="line"><a name="l01639"></a><span class="lineno"> 1639</span>                             NDArrayHandle* vals,</div>
-<div class="line"><a name="l01640"></a><span class="lineno"> 1640</span>                             <span class="keywordtype">int</span> priority);</div>
-<div class="line"><a name="l01650"></a><span class="lineno"> 1650</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a25588eca6c42583151e77bcf71ffc3ac">MXKVStorePullEx</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01651"></a><span class="lineno"> 1651</span>                               mx_uint num,</div>
-<div class="line"><a name="l01652"></a><span class="lineno"> 1652</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01653"></a><span class="lineno"> 1653</span>                               NDArrayHandle* vals,</div>
-<div class="line"><a name="l01654"></a><span class="lineno"> 1654</span>                               <span class="keywordtype">int</span> priority);</div>
-<div class="line"><a name="l01655"></a><span class="lineno"> 1655</span> </div>
-<div class="line"><a name="l01668"></a><span class="lineno"> 1668</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a11fa0057c57ca3a6b0f6e8ec2660b050">MXKVStorePullRowSparse</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01669"></a><span class="lineno"> 1669</span>                                      mx_uint num,</div>
-<div class="line"><a name="l01670"></a><span class="lineno"> 1670</span>                                      <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
-<div class="line"><a name="l01671"></a><span class="lineno"> 1671</span>                                      NDArrayHandle* vals,</div>
-<div class="line"><a name="l01672"></a><span class="lineno"> 1672</span>                                      <span class="keyword">const</span> NDArrayHandle* row_ids,</div>
-<div class="line"><a name="l01673"></a><span class="lineno"> 1673</span>                                      <span class="keywordtype">int</span> priority);</div>
-<div class="line"><a name="l01686"></a><span class="lineno"> 1686</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a39d10b5d3d13635ad94bef1445306f45">MXKVStorePullRowSparseEx</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01687"></a><span class="lineno"> 1687</span>                                        mx_uint num,</div>
-<div class="line"><a name="l01688"></a><span class="lineno"> 1688</span>                                        <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
-<div class="line"><a name="l01689"></a><span class="lineno"> 1689</span>                                        NDArrayHandle* vals,</div>
-<div class="line"><a name="l01690"></a><span class="lineno"> 1690</span>                                        <span class="keyword">const</span> NDArrayHandle* row_ids,</div>
-<div class="line"><a name="l01691"></a><span class="lineno"> 1691</span>                                        <span class="keywordtype">int</span> priority);</div>
-<div class="line"><a name="l01692"></a><span class="lineno"> 1692</span> </div>
-<div class="line"><a name="l01701"></a><span class="lineno"><a class="line" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b"> 1701</a></span> <span class="keyword">typedef</span> void (<a class="code" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a>)(<span class="keywordtype">int</span> key,</div>
-<div class="line"><a name="l01702"></a><span class="lineno"> 1702</span>                                 NDArrayHandle recv,</div>
-<div class="line"><a name="l01703"></a><span class="lineno"> 1703</span>                                 NDArrayHandle local,</div>
-<div class="line"><a name="l01704"></a><span class="lineno"> 1704</span>                                 <span class="keywordtype">void</span> *handle);</div>
-<div class="line"><a name="l01713"></a><span class="lineno"><a class="line" href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42"> 1713</a></span> <span class="keyword">typedef</span> void (<a class="code" href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42">MXKVStoreStrUpdater</a>)(<span class="keyword">const</span> <span class="keywordtype">char</span>* key,</div>
-<div class="line"><a name="l01714"></a><span class="lineno"> 1714</span>                                    NDArrayHandle recv,</div>
-<div class="line"><a name="l01715"></a><span class="lineno"> 1715</span>                                    NDArrayHandle local,</div>
-<div class="line"><a name="l01716"></a><span class="lineno"> 1716</span>                                    <span class="keywordtype">void</span> *handle);</div>
-<div class="line"><a name="l01724"></a><span class="lineno"> 1724</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#adbdd2035afce961837866c711af4f0ab">MXKVStoreSetUpdater</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01725"></a><span class="lineno"> 1725</span>                                   <a class="code" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a> updater,</div>
-<div class="line"><a name="l01726"></a><span class="lineno"> 1726</span>                                   <span class="keywordtype">void</span> *updater_handle);</div>
-<div class="line"><a name="l01735"></a><span class="lineno"> 1735</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a70e08b32e56ad80a3557f831f6fd3b50">MXKVStoreSetUpdaterEx</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01736"></a><span class="lineno"> 1736</span>                                     <a class="code" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a> updater,</div>
-<div class="line"><a name="l01737"></a><span class="lineno"> 1737</span>                                     <a class="code" href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42">MXKVStoreStrUpdater</a> str_updater,</div>
-<div class="line"><a name="l01738"></a><span class="lineno"> 1738</span>                                     <span class="keywordtype">void</span> *updater_handle);</div>
-<div class="line"><a name="l01745"></a><span class="lineno"> 1745</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a9dba9ada3ed98c76fe78221013e37f07">MXKVStoreGetType</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01746"></a><span class="lineno"> 1746</span>                                <span class="keyword">const</span> <span class="keywordtype">char</span>** type);</div>
-<div class="line"><a name="l01747"></a><span class="lineno"> 1747</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01748"></a><span class="lineno"> 1748</span> <span class="comment">// Part 6: advanced KVStore for multi-machines</span></div>
-<div class="line"><a name="l01749"></a><span class="lineno"> 1749</span> <span class="comment">//--------------------------------------------</span></div>
-<div class="line"><a name="l01750"></a><span class="lineno"> 1750</span> </div>
-<div class="line"><a name="l01758"></a><span class="lineno"> 1758</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1edf063b82bdd94d1f5214056dd55144">MXKVStoreGetRank</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01759"></a><span class="lineno"> 1759</span>                                <span class="keywordtype">int</span> *ret);</div>
-<div class="line"><a name="l01760"></a><span class="lineno"> 1760</span> </div>
-<div class="line"><a name="l01770"></a><span class="lineno"> 1770</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aac41a2bd1dfcdebb920c97eab40ea07b">MXKVStoreGetGroupSize</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01771"></a><span class="lineno"> 1771</span>                                     <span class="keywordtype">int</span> *ret);</div>
-<div class="line"><a name="l01772"></a><span class="lineno"> 1772</span> </div>
-<div class="line"><a name="l01778"></a><span class="lineno"> 1778</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a67d719a0a5470f6292acb5896fbca4aa">MXKVStoreIsWorkerNode</a>(<span class="keywordtype">int</span> *ret);</div>
-<div class="line"><a name="l01779"></a><span class="lineno"> 1779</span> </div>
-<div class="line"><a name="l01780"></a><span class="lineno"> 1780</span> </div>
-<div class="line"><a name="l01786"></a><span class="lineno"> 1786</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a06bf33dff7bc03cdd7474460e515882d">MXKVStoreIsServerNode</a>(<span class="keywordtype">int</span> *ret);</div>
-<div class="line"><a name="l01787"></a><span class="lineno"> 1787</span> </div>
-<div class="line"><a name="l01788"></a><span class="lineno"> 1788</span> </div>
-<div class="line"><a name="l01794"></a><span class="lineno"> 1794</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aad401ff990cb4142070e024af7ac0781">MXKVStoreIsSchedulerNode</a>(<span class="keywordtype">int</span> *ret);</div>
-<div class="line"><a name="l01795"></a><span class="lineno"> 1795</span> </div>
-<div class="line"><a name="l01802"></a><span class="lineno"> 1802</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a96e8df6a3170c52b369954215dabe71c">MXKVStoreBarrier</a>(KVStoreHandle handle);</div>
-<div class="line"><a name="l01803"></a><span class="lineno"> 1803</span> </div>
-<div class="line"><a name="l01811"></a><span class="lineno"> 1811</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#add5b8e9bbd098e8fef5deab4c7ab2737">MXKVStoreSetBarrierBeforeExit</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01812"></a><span class="lineno"> 1812</span>                                             <span class="keyword">const</span> <span class="keywordtype">int</span> barrier_before_exit);</div>
-<div class="line"><a name="l01813"></a><span class="lineno"> 1813</span> </div>
-<div class="line"><a name="l01820"></a><span class="lineno"><a class="line" href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634"> 1820</a></span> <span class="keyword">typedef</span> void (<a class="code" href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634">MXKVStoreServerController</a>)(<span class="keywordtype">int</span> head,</div>
-<div class="line"><a name="l01821"></a><span class="lineno"> 1821</span>                                          <span class="keyword">const</span> <span class="keywordtype">char</span> *body,</div>
-<div class="line"><a name="l01822"></a><span class="lineno"> 1822</span>                                          <span class="keywordtype">void</span> *controller_handle);</div>
-<div class="line"><a name="l01823"></a><span class="lineno"> 1823</span> </div>
-<div class="line"><a name="l01832"></a><span class="lineno"> 1832</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a732c3edf14dc24784abb27fe2613a10a">MXKVStoreRunServer</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01833"></a><span class="lineno"> 1833</span>                                  <a class="code" href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634">MXKVStoreServerController</a> controller,</div>
-<div class="line"><a name="l01834"></a><span class="lineno"> 1834</span>                                  <span class="keywordtype">void</span> *controller_handle);</div>
-<div class="line"><a name="l01835"></a><span class="lineno"> 1835</span> </div>
-<div class="line"><a name="l01844"></a><span class="lineno"> 1844</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a5de45491037df7bf6b757b627100acd7">MXKVStoreSendCommmandToServers</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01845"></a><span class="lineno"> 1845</span>                                              <span class="keywordtype">int</span> cmd_id,</div>
-<div class="line"><a name="l01846"></a><span class="lineno"> 1846</span>                                              <span class="keyword">const</span> <span class="keywordtype">char</span>* cmd_body);</div>
-<div class="line"><a name="l01847"></a><span class="lineno"> 1847</span> </div>
-<div class="line"><a name="l01858"></a><span class="lineno"> 1858</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ac8b5138bcb7959cc831a97a24755e969">MXKVStoreGetNumDeadNode</a>(KVStoreHandle handle,</div>
-<div class="line"><a name="l01859"></a><span class="lineno"> 1859</span>                                       <span class="keyword">const</span> <span class="keywordtype">int</span> node_id,</div>
-<div class="line"><a name="l01860"></a><span class="lineno"> 1860</span>                                       <span class="keywordtype">int</span> *number,</div>
-<div class="line"><a name="l01861"></a><span class="lineno"> 1861</span>                                       <span class="keyword">const</span> <span class="keywordtype">int</span> timeout_sec <a class="code" href="c__api_8h.html#a2380be5ab258c3657553d0cef62936fe">DEFAULT</a>(60));</div>
-<div class="line"><a name="l01862"></a><span class="lineno"> 1862</span> </div>
-<div class="line"><a name="l01869"></a><span class="lineno"> 1869</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab2b93d730f48591b4f77f3968f6a1b98">MXRecordIOWriterCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span> *uri, RecordIOHandle *out);</div>
-<div class="line"><a name="l01870"></a><span class="lineno"> 1870</span> </div>
-<div class="line"><a name="l01876"></a><span class="lineno"> 1876</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab2a55917ffd918e334464f1b8a7a1ab9">MXRecordIOWriterFree</a>(RecordIOHandle handle);</div>
-<div class="line"><a name="l01877"></a><span class="lineno"> 1877</span> </div>
-<div class="line"><a name="l01885"></a><span class="lineno"> 1885</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a0c380b6aa00446e7cb6021c8e2df75f3">MXRecordIOWriterWriteRecord</a>(RecordIOHandle handle,</div>
-<div class="line"><a name="l01886"></a><span class="lineno"> 1886</span>                                           <span class="keyword">const</span> <span class="keywordtype">char</span> *buf, <span class="keywordtype">size_t</span> size);</div>
-<div class="line"><a name="l01887"></a><span class="lineno"> 1887</span> </div>
-<div class="line"><a name="l01894"></a><span class="lineno"> 1894</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a6cc8e9f94ec431edf8d9502f20d7b833">MXRecordIOWriterTell</a>(RecordIOHandle handle, <span class="keywordtype">size_t</span> *pos);</div>
-<div class="line"><a name="l01895"></a><span class="lineno"> 1895</span> </div>
-<div class="line"><a name="l01902"></a><span class="lineno"> 1902</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a4464ddbe13b8a4542d3f91bf055d50af">MXRecordIOReaderCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span> *uri, RecordIOHandle *out);</div>
-<div class="line"><a name="l01903"></a><span class="lineno"> 1903</span> </div>
-<div class="line"><a name="l01909"></a><span class="lineno"> 1909</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a92736f17a5d9fd58e53e621358e4ef31">MXRecordIOReaderFree</a>(RecordIOHandle handle);</div>
-<div class="line"><a name="l01910"></a><span class="lineno"> 1910</span> </div>
-<div class="line"><a name="l01918"></a><span class="lineno"> 1918</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a38e350f748967e0caa2c781a507667e4">MXRecordIOReaderReadRecord</a>(RecordIOHandle handle,</div>
-<div class="line"><a name="l01919"></a><span class="lineno"> 1919</span>                                         <span class="keywordtype">char</span> <span class="keyword">const</span> **buf, <span class="keywordtype">size_t</span> *size);</div>
-<div class="line"><a name="l01920"></a><span class="lineno"> 1920</span> </div>
-<div class="line"><a name="l01927"></a><span class="lineno"> 1927</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa992161083826ee6a434ade2790fbb31">MXRecordIOReaderSeek</a>(RecordIOHandle handle, <span class="keywordtype">size_t</span> pos);</div>
-<div class="line"><a name="l01928"></a><span class="lineno"> 1928</span> </div>
-<div class="line"><a name="l01935"></a><span class="lineno"> 1935</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a060d9a58a2e692af26fae433cecf7436">MXRecordIOReaderTell</a>(RecordIOHandle handle, <span class="keywordtype">size_t</span> *pos);</div>
-<div class="line"><a name="l01936"></a><span class="lineno"> 1936</span> </div>
-<div class="line"><a name="l01940"></a><span class="lineno"> 1940</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a87e98002ae1a925b65a8cf0773f802d3">MXRtcCreate</a>(<span class="keywordtype">char</span>* name, mx_uint num_input, mx_uint num_output,</div>
-<div class="line"><a name="l01941"></a><span class="lineno"> 1941</span>                           <span class="keywordtype">char</span>** input_names, <span class="keywordtype">char</span>** output_names,</div>
-<div class="line"><a name="l01942"></a><span class="lineno"> 1942</span>                           NDArrayHandle* inputs, NDArrayHandle* outputs,</div>
-<div class="line"><a name="l01943"></a><span class="lineno"> 1943</span>                           <span class="keywordtype">char</span>* kernel, RtcHandle *out);</div>
-<div class="line"><a name="l01944"></a><span class="lineno"> 1944</span> </div>
-<div class="line"><a name="l01948"></a><span class="lineno"> 1948</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a20f72cb4e18d5081eaf40875adcb6e98">MXRtcPush</a>(RtcHandle handle, mx_uint num_input, mx_uint num_output,</div>
-<div class="line"><a name="l01949"></a><span class="lineno"> 1949</span>                         NDArrayHandle* inputs, NDArrayHandle* outputs,</div>
-<div class="line"><a name="l01950"></a><span class="lineno"> 1950</span>                         mx_uint gridDimX,</div>
-<div class="line"><a name="l01951"></a><span class="lineno"> 1951</span>                         mx_uint gridDimY,</div>
-<div class="line"><a name="l01952"></a><span class="lineno"> 1952</span>                         mx_uint gridDimZ,</div>
-<div class="line"><a name="l01953"></a><span class="lineno"> 1953</span>                         mx_uint blockDimX,</div>
-<div class="line"><a name="l01954"></a><span class="lineno"> 1954</span>                         mx_uint blockDimY,</div>
-<div class="line"><a name="l01955"></a><span class="lineno"> 1955</span>                         mx_uint blockDimZ);</div>
-<div class="line"><a name="l01956"></a><span class="lineno"> 1956</span> </div>
-<div class="line"><a name="l01960"></a><span class="lineno"> 1960</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a13819b19ab3cdd91566b1d5cf8bc6b0a">MXRtcFree</a>(RtcHandle handle);</div>
-<div class="line"><a name="l01961"></a><span class="lineno"> 1961</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l01962"></a><span class="lineno"> 1962</span> <span class="comment"> * \brief register custom operators from frontend.</span></div>
-<div class="line"><a name="l01963"></a><span class="lineno"> 1963</span> <span class="comment"> * \param op_type name of custom op</span></div>
-<div class="line"><a name="l01964"></a><span class="lineno"> 1964</span> <span class="comment"> * \param creator</span></div>
-<div class="line"><a name="l01965"></a><span class="lineno"> 1965</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l01966"></a><span class="lineno"> 1966</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a92233cd9477ce12458bbd2b3e628dec9">MXCustomOpRegister</a>(<span class="keyword">const</span> <span class="keywordtype">char</span>* op_type, <a class="code" href="c__api_8h.html#a3a7fd875ee6aad3f20981764626a6c7b">CustomOpPropCreator</a> creator);</div>
-<div class="line"><a name="l01967"></a><span class="lineno"> 1967</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l01968"></a><span class="lineno"> 1968</span> <span class="comment"> * \brief record custom function for backward later.</span></div>
-<div class="line"><a name="l01969"></a><span class="lineno"> 1969</span> <span class="comment"> * \param num_inputs number of input NDArrays.</span></div>
-<div class="line"><a name="l01970"></a><span class="lineno"> 1970</span> <span class="comment"> * \param inputs handle to input NDArrays.</span></div>
-<div class="line"><a name="l01971"></a><span class="lineno"> 1971</span> <span class="comment"> * \param num_outputs number of output NDArrays.</span></div>
-<div class="line"><a name="l01972"></a><span class="lineno"> 1972</span> <span class="comment"> * \param outputs handle to output NDArrays.</span></div>
-<div class="line"><a name="l01973"></a><span class="lineno"> 1973</span> <span class="comment"> * \param callbacks callbacks for backward function.</span></div>
-<div class="line"><a name="l01974"></a><span class="lineno"> 1974</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l01975"></a><span class="lineno"> 1975</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ae1f7686cd08133d865ed6b9ea23eb31b">MXCustomFunctionRecord</a>(<span class="keywordtype">int</span> num_inputs, NDArrayHandle *inputs,</div>
-<div class="line"><a name="l01976"></a><span class="lineno"> 1976</span>                                      <span class="keywordtype">int</span> num_outputs, NDArrayHandle *outputs,</div>
-<div class="line"><a name="l01977"></a><span class="lineno"> 1977</span>                                      <span class="keyword">struct</span> <a class="code" href="structMXCallbackList.html">MXCallbackList</a> *<a class="code" href="structMXCallbackList.html#ad56325e57f5fffc2b920eb3f8f378199">callbacks</a>);</div>
+<div class="line"><a name="l01054"></a><span class="lineno"> 1054</span> </div>
+<div class="line"><a name="l01061"></a><span class="lineno"> 1061</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a970b94e3dfb72a2a76065149180c4f1f">MXSymbolGetNumOutputs</a>(SymbolHandle symbol,</div>
+<div class="line"><a name="l01062"></a><span class="lineno"> 1062</span>                                      mx_uint *output_count);</div>
+<div class="line"><a name="l01063"></a><span class="lineno"> 1063</span> </div>
+<div class="line"><a name="l01070"></a><span class="lineno"> 1070</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a13816872b73d6c474686d985747e8c65">MXSymbolGetInternals</a>(SymbolHandle symbol,</div>
+<div class="line"><a name="l01071"></a><span class="lineno"> 1071</span>                                    SymbolHandle *out);</div>
+<div class="line"><a name="l01078"></a><span class="lineno"> 1078</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a6050b4cfcb75ed3d0a44afb1b8df45cd">MXSymbolGetChildren</a>(SymbolHandle symbol,</div>
+<div class="line"><a name="l01079"></a><span class="lineno"> 1079</span>                                   SymbolHandle *out);</div>
+<div class="line"><a name="l01087"></a><span class="lineno"> 1087</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a8e601320c59679d9423179f23f281bf2">MXSymbolGetOutput</a>(SymbolHandle symbol,</div>
+<div class="line"><a name="l01088"></a><span class="lineno"> 1088</span>                                 mx_uint index,</div>
+<div class="line"><a name="l01089"></a><span class="lineno"> 1089</span>                                 SymbolHandle *out);</div>
+<div class="line"><a name="l01090"></a><span class="lineno"> 1090</span> </div>
+<div class="line"><a name="l01098"></a><span class="lineno"> 1098</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a38e75c5a41473912e2d6cdfececba84d">MXSymbolListAuxiliaryStates</a>(SymbolHandle symbol,</div>
+<div class="line"><a name="l01099"></a><span class="lineno"> 1099</span>                                           mx_uint *out_size,</div>
+<div class="line"><a name="l01100"></a><span class="lineno"> 1100</span>                                           <span class="keyword">const</span> <span class="keywordtype">char</span> ***out_str_array);</div>
+<div class="line"><a name="l01115"></a><span class="lineno"> 1115</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a879a5ee991393d364b787e948b4c9961">MXSymbolCompose</a>(SymbolHandle sym,</div>
+<div class="line"><a name="l01116"></a><span class="lineno"> 1116</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span> *name,</div>
+<div class="line"><a name="l01117"></a><span class="lineno"> 1117</span>                               mx_uint num_args,</div>
+<div class="line"><a name="l01118"></a><span class="lineno"> 1118</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01119"></a><span class="lineno"> 1119</span>                               SymbolHandle* args);</div>
+<div class="line"><a name="l01129"></a><span class="lineno"> 1129</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ac2268c0bc7ad2f96d36bbbdab9a51c2e">MXSymbolGrad</a>(SymbolHandle sym,</div>
+<div class="line"><a name="l01130"></a><span class="lineno"> 1130</span>                            mx_uint num_wrt,</div>
+<div class="line"><a name="l01131"></a><span class="lineno"> 1131</span>                            <span class="keyword">const</span> <span class="keywordtype">char</span>** wrt,</div>
+<div class="line"><a name="l01132"></a><span class="lineno"> 1132</span>                            SymbolHandle* out);</div>
+<div class="line"><a name="l01155"></a><span class="lineno"> 1155</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#afaf03324563098f1ac4c23ecfc88b784">MXSymbolInferShape</a>(SymbolHandle sym,</div>
+<div class="line"><a name="l01156"></a><span class="lineno"> 1156</span>                                  mx_uint num_args,</div>
+<div class="line"><a name="l01157"></a><span class="lineno"> 1157</span>                                  <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01158"></a><span class="lineno"> 1158</span>                                  <span class="keyword">const</span> mx_uint *arg_ind_ptr,</div>
+<div class="line"><a name="l01159"></a><span class="lineno"> 1159</span>                                  <span class="keyword">const</span> mx_uint *arg_shape_data,</div>
+<div class="line"><a name="l01160"></a><span class="lineno"> 1160</span>                                  mx_uint *in_shape_size,</div>
+<div class="line"><a name="l01161"></a><span class="lineno"> 1161</span>                                  <span class="keyword">const</span> mx_uint **in_shape_ndim,</div>
+<div class="line"><a name="l01162"></a><span class="lineno"> 1162</span>                                  <span class="keyword">const</span> mx_uint ***in_shape_data,</div>
+<div class="line"><a name="l01163"></a><span class="lineno"> 1163</span>                                  mx_uint *out_shape_size,</div>
+<div class="line"><a name="l01164"></a><span class="lineno"> 1164</span>                                  <span class="keyword">const</span> mx_uint **out_shape_ndim,</div>
+<div class="line"><a name="l01165"></a><span class="lineno"> 1165</span>                                  <span class="keyword">const</span> mx_uint ***out_shape_data,</div>
+<div class="line"><a name="l01166"></a><span class="lineno"> 1166</span>                                  mx_uint *aux_shape_size,</div>
+<div class="line"><a name="l01167"></a><span class="lineno"> 1167</span>                                  <span class="keyword">const</span> mx_uint **aux_shape_ndim,</div>
+<div class="line"><a name="l01168"></a><span class="lineno"> 1168</span>                                  <span class="keyword">const</span> mx_uint ***aux_shape_data,</div>
+<div class="line"><a name="l01169"></a><span class="lineno"> 1169</span>                                  <span class="keywordtype">int</span> *complete);</div>
+<div class="line"><a name="l01194"></a><span class="lineno"> 1194</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a664c85b47c69b81be3b9ea1dfcb72a94">MXSymbolInferShapePartial</a>(SymbolHandle sym,</div>
+<div class="line"><a name="l01195"></a><span class="lineno"> 1195</span>                                         mx_uint num_args,</div>
+<div class="line"><a name="l01196"></a><span class="lineno"> 1196</span>                                         <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01197"></a><span class="lineno"> 1197</span>                                         <span class="keyword">const</span> mx_uint *arg_ind_ptr,</div>
+<div class="line"><a name="l01198"></a><span class="lineno"> 1198</span>                                         <span class="keyword">const</span> mx_uint *arg_shape_data,</div>
+<div class="line"><a name="l01199"></a><span class="lineno"> 1199</span>                                         mx_uint *in_shape_size,</div>
+<div class="line"><a name="l01200"></a><span class="lineno"> 1200</span>                                         <span class="keyword">const</span> mx_uint **in_shape_ndim,</div>
+<div class="line"><a name="l01201"></a><span class="lineno"> 1201</span>                                         <span class="keyword">const</span> mx_uint ***in_shape_data,</div>
+<div class="line"><a name="l01202"></a><span class="lineno"> 1202</span>                                         mx_uint *out_shape_size,</div>
+<div class="line"><a name="l01203"></a><span class="lineno"> 1203</span>                                         <span class="keyword">const</span> mx_uint **out_shape_ndim,</div>
+<div class="line"><a name="l01204"></a><span class="lineno"> 1204</span>                                         <span class="keyword">const</span> mx_uint ***out_shape_data,</div>
+<div class="line"><a name="l01205"></a><span class="lineno"> 1205</span>                                         mx_uint *aux_shape_size,</div>
+<div class="line"><a name="l01206"></a><span class="lineno"> 1206</span>                                         <span class="keyword">const</span> mx_uint **aux_shape_ndim,</div>
+<div class="line"><a name="l01207"></a><span class="lineno"> 1207</span>                                         <span class="keyword">const</span> mx_uint ***aux_shape_data,</div>
+<div class="line"><a name="l01208"></a><span class="lineno"> 1208</span>                                         <span class="keywordtype">int</span> *complete);</div>
+<div class="line"><a name="l01209"></a><span class="lineno"> 1209</span> </div>
+<div class="line"><a name="l01228"></a><span class="lineno"> 1228</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1eff7ea3fd2fe56509c75f5a610890e6">MXSymbolInferType</a>(SymbolHandle sym,</div>
+<div class="line"><a name="l01229"></a><span class="lineno"> 1229</span>                                 mx_uint num_args,</div>
+<div class="line"><a name="l01230"></a><span class="lineno"> 1230</span>                                 <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01231"></a><span class="lineno"> 1231</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> *arg_type_data,</div>
+<div class="line"><a name="l01232"></a><span class="lineno"> 1232</span>                                 mx_uint *in_type_size,</div>
+<div class="line"><a name="l01233"></a><span class="lineno"> 1233</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> **in_type_data,</div>
+<div class="line"><a name="l01234"></a><span class="lineno"> 1234</span>                                 mx_uint *out_type_size,</div>
+<div class="line"><a name="l01235"></a><span class="lineno"> 1235</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> **out_type_data,</div>
+<div class="line"><a name="l01236"></a><span class="lineno"> 1236</span>                                 mx_uint *aux_type_size,</div>
+<div class="line"><a name="l01237"></a><span class="lineno"> 1237</span>                                 <span class="keyword">const</span> <span class="keywordtype">int</span> **aux_type_data,</div>
+<div class="line"><a name="l01238"></a><span class="lineno"> 1238</span>                                 <span class="keywordtype">int</span> *complete);</div>
+<div class="line"><a name="l01239"></a><span class="lineno"> 1239</span> </div>
+<div class="line"><a name="l01240"></a><span class="lineno"> 1240</span> </div>
+<div class="line"><a name="l01241"></a><span class="lineno"> 1241</span> </div>
+<div class="line"><a name="l01242"></a><span class="lineno"> 1242</span> </div>
+<div class="line"><a name="l01243"></a><span class="lineno"> 1243</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01244"></a><span class="lineno"> 1244</span> <span class="comment">// Part 4: Executor interface</span></div>
+<div class="line"><a name="l01245"></a><span class="lineno"> 1245</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01251"></a><span class="lineno"> 1251</span> <span class="comment"></span><a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ade1487656dfba3cae12c5bf676e08dcf">MXExecutorFree</a>(ExecutorHandle handle);</div>
+<div class="line"><a name="l01258"></a><span class="lineno"> 1258</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ac9f776a6e7be0bacda4dcd3efa5f7dc5">MXExecutorPrint</a>(ExecutorHandle handle, <span class="keyword">const</span> <span class="keywordtype">char</span> **out_str);</div>
+<div class="line"><a name="l01266"></a><span class="lineno"> 1266</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1f9987e701b810e0e79b5207fa215415">MXExecutorForward</a>(ExecutorHandle handle, <span class="keywordtype">int</span> is_train);</div>
+<div class="line"><a name="l01276"></a><span class="lineno"> 1276</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ad8f92bb15e11058abe12959cfc4dcbb3">MXExecutorBackward</a>(ExecutorHandle handle,</div>
+<div class="line"><a name="l01277"></a><span class="lineno"> 1277</span>                                  mx_uint len,</div>
+<div class="line"><a name="l01278"></a><span class="lineno"> 1278</span>                                  NDArrayHandle *head_grads);</div>
+<div class="line"><a name="l01289"></a><span class="lineno"> 1289</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a3a211f3e0c6270271ce4fe79228dd0c2">MXExecutorBackwardEx</a>(ExecutorHandle handle,</div>
+<div class="line"><a name="l01290"></a><span class="lineno"> 1290</span>                                    mx_uint len,</div>
+<div class="line"><a name="l01291"></a><span class="lineno"> 1291</span>                                    NDArrayHandle *head_grads,</div>
+<div class="line"><a name="l01292"></a><span class="lineno"> 1292</span>                                    <span class="keywordtype">int</span> is_train);</div>
+<div class="line"><a name="l01301"></a><span class="lineno"> 1301</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a442845c8ef89d4c849f468ea2b7717f7">MXExecutorOutputs</a>(ExecutorHandle handle,</div>
+<div class="line"><a name="l01302"></a><span class="lineno"> 1302</span>                                 mx_uint *out_size,</div>
+<div class="line"><a name="l01303"></a><span class="lineno"> 1303</span>                                 NDArrayHandle **out);</div>
+<div class="line"><a name="l01304"></a><span class="lineno"> 1304</span> </div>
+<div class="line"><a name="l01320"></a><span class="lineno"> 1320</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a10a6deacde18b7d52b44eee0ddddf0e1">MXExecutorBind</a>(SymbolHandle symbol_handle,</div>
+<div class="line"><a name="l01321"></a><span class="lineno"> 1321</span>                              <span class="keywordtype">int</span> dev_type,</div>
+<div class="line"><a name="l01322"></a><span class="lineno"> 1322</span>                              <span class="keywordtype">int</span> dev_id,</div>
+<div class="line"><a name="l01323"></a><span class="lineno"> 1323</span>                              mx_uint len,</div>
+<div class="line"><a name="l01324"></a><span class="lineno"> 1324</span>                              NDArrayHandle *in_args,</div>
+<div class="line"><a name="l01325"></a><span class="lineno"> 1325</span>                              NDArrayHandle *arg_grad_store,</div>
+<div class="line"><a name="l01326"></a><span class="lineno"> 1326</span>                              mx_uint *grad_req_type,</div>
+<div class="line"><a name="l01327"></a><span class="lineno"> 1327</span>                              mx_uint aux_states_len,</div>
+<div class="line"><a name="l01328"></a><span class="lineno"> 1328</span>                              NDArrayHandle *aux_states,</div>
+<div class="line"><a name="l01329"></a><span class="lineno"> 1329</span>                              ExecutorHandle *out);</div>
+<div class="line"><a name="l01351"></a><span class="lineno"> 1351</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ad1646370458f8a3ddb9e4f3365c5e510">MXExecutorBindX</a>(SymbolHandle symbol_handle,</div>
+<div class="line"><a name="l01352"></a><span class="lineno"> 1352</span>                               <span class="keywordtype">int</span> dev_type,</div>
+<div class="line"><a name="l01353"></a><span class="lineno"> 1353</span>                               <span class="keywordtype">int</span> dev_id,</div>
+<div class="line"><a name="l01354"></a><span class="lineno"> 1354</span>                               mx_uint num_map_keys,</div>
+<div class="line"><a name="l01355"></a><span class="lineno"> 1355</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** map_keys,</div>
+<div class="line"><a name="l01356"></a><span class="lineno"> 1356</span>                               <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_types,</div>
+<div class="line"><a name="l01357"></a><span class="lineno"> 1357</span>                               <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_ids,</div>
+<div class="line"><a name="l01358"></a><span class="lineno"> 1358</span>                               mx_uint len,</div>
+<div class="line"><a name="l01359"></a><span class="lineno"> 1359</span>                               NDArrayHandle *in_args,</div>
+<div class="line"><a name="l01360"></a><span class="lineno"> 1360</span>                               NDArrayHandle *arg_grad_store,</div>
+<div class="line"><a name="l01361"></a><span class="lineno"> 1361</span>                               mx_uint *grad_req_type,</div>
+<div class="line"><a name="l01362"></a><span class="lineno"> 1362</span>                               mx_uint aux_states_len,</div>
+<div class="line"><a name="l01363"></a><span class="lineno"> 1363</span>                               NDArrayHandle *aux_states,</div>
+<div class="line"><a name="l01364"></a><span class="lineno"> 1364</span>                               ExecutorHandle *out);</div>
+<div class="line"><a name="l01387"></a><span class="lineno"> 1387</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a8139e691c88ebcf082f9e954598e0e86">MXExecutorBindEX</a>(SymbolHandle symbol_handle,</div>
+<div class="line"><a name="l01388"></a><span class="lineno"> 1388</span>                                <span class="keywordtype">int</span> dev_type,</div>
+<div class="line"><a name="l01389"></a><span class="lineno"> 1389</span>                                <span class="keywordtype">int</span> dev_id,</div>
+<div class="line"><a name="l01390"></a><span class="lineno"> 1390</span>                                mx_uint num_map_keys,</div>
+<div class="line"><a name="l01391"></a><span class="lineno"> 1391</span>                                <span class="keyword">const</span> <span class="keywordtype">char</span>** map_keys,</div>
+<div class="line"><a name="l01392"></a><span class="lineno"> 1392</span>                                <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_types,</div>
+<div class="line"><a name="l01393"></a><span class="lineno"> 1393</span>                                <span class="keyword">const</span> <span class="keywordtype">int</span>* map_dev_ids,</div>
+<div class="line"><a name="l01394"></a><span class="lineno"> 1394</span>                                mx_uint len,</div>
+<div class="line"><a name="l01395"></a><span class="lineno"> 1395</span>                                NDArrayHandle *in_args,</div>
+<div class="line"><a name="l01396"></a><span class="lineno"> 1396</span>                                NDArrayHandle *arg_grad_store,</div>
+<div class="line"><a name="l01397"></a><span class="lineno"> 1397</span>                                mx_uint *grad_req_type,</div>
+<div class="line"><a name="l01398"></a><span class="lineno"> 1398</span>                                mx_uint aux_states_len,</div>
+<div class="line"><a name="l01399"></a><span class="lineno"> 1399</span>                                NDArrayHandle *aux_states,</div>
+<div class="line"><a name="l01400"></a><span class="lineno"> 1400</span>                                ExecutorHandle shared_exec,</div>
+<div class="line"><a name="l01401"></a><span class="lineno"> 1401</span>                                ExecutorHandle *out);</div>
+<div class="line"><a name="l01402"></a><span class="lineno"> 1402</span> </div>
+<div class="line"><a name="l01403"></a><span class="lineno"> 1403</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ae8bc5c6380c5c7aaa6c8c6b884482e29">MXExecutorSimpleBind</a>(SymbolHandle symbol_handle,</div>
+<div class="line"><a name="l01404"></a><span class="lineno"> 1404</span>                                    <span class="keywordtype">int</span> dev_type,</div>
+<div class="line"><a name="l01405"></a><span class="lineno"> 1405</span>                                    <span class="keywordtype">int</span> dev_id,</div>
+<div class="line"><a name="l01406"></a><span class="lineno"> 1406</span>                                    <span class="keyword">const</span> mx_uint num_g2c_keys,</div>
+<div class="line"><a name="l01407"></a><span class="lineno"> 1407</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** g2c_keys,</div>
+<div class="line"><a name="l01408"></a><span class="lineno"> 1408</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* g2c_dev_types,</div>
+<div class="line"><a name="l01409"></a><span class="lineno"> 1409</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* g2c_dev_ids,</div>
+<div class="line"><a name="l01410"></a><span class="lineno"> 1410</span>                                    <span class="keyword">const</span> mx_uint provided_grad_req_list_len,</div>
+<div class="line"><a name="l01411"></a><span class="lineno"> 1411</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_grad_req_names,</div>
+<div class="line"><a name="l01412"></a><span class="lineno"> 1412</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_grad_req_types,</div>
+<div class="line"><a name="l01413"></a><span class="lineno"> 1413</span>                                    <span class="keyword">const</span> mx_uint num_provided_arg_shapes,</div>
+<div class="line"><a name="l01414"></a><span class="lineno"> 1414</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_arg_shape_names,</div>
+<div class="line"><a name="l01415"></a><span class="lineno"> 1415</span>                                    <span class="keyword">const</span> mx_uint* provided_arg_shape_data,</div>
+<div class="line"><a name="l01416"></a><span class="lineno"> 1416</span>                                    <span class="keyword">const</span> mx_uint* provided_arg_shape_idx,</div>
+<div class="line"><a name="l01417"></a><span class="lineno"> 1417</span>                                    <span class="keyword">const</span> mx_uint num_provided_arg_dtypes,</div>
+<div class="line"><a name="l01418"></a><span class="lineno"> 1418</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_arg_dtype_names,</div>
+<div class="line"><a name="l01419"></a><span class="lineno"> 1419</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* provided_arg_dtypes,</div>
+<div class="line"><a name="l01420"></a><span class="lineno"> 1420</span>                                    <span class="keyword">const</span> mx_uint num_provided_arg_stypes,</div>
+<div class="line"><a name="l01421"></a><span class="lineno"> 1421</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** provided_arg_stype_names,</div>
+<div class="line"><a name="l01422"></a><span class="lineno"> 1422</span>                                    <span class="keyword">const</span> <span class="keywordtype">int</span>* provided_arg_stypes,</div>
+<div class="line"><a name="l01423"></a><span class="lineno"> 1423</span>                                    <span class="keyword">const</span> mx_uint num_shared_arg_names,</div>
+<div class="line"><a name="l01424"></a><span class="lineno"> 1424</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** shared_arg_name_list,</div>
+<div class="line"><a name="l01425"></a><span class="lineno"> 1425</span>                                    <span class="keywordtype">int</span>* shared_buffer_len,</div>
+<div class="line"><a name="l01426"></a><span class="lineno"> 1426</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>** shared_buffer_name_list,</div>
+<div class="line"><a name="l01427"></a><span class="lineno"> 1427</span>                                    NDArrayHandle* shared_buffer_handle_list,</div>
+<div class="line"><a name="l01428"></a><span class="lineno"> 1428</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span>*** updated_shared_buffer_name_list,</div>
+<div class="line"><a name="l01429"></a><span class="lineno"> 1429</span>                                    NDArrayHandle** updated_shared_buffer_handle_list,</div>
+<div class="line"><a name="l01430"></a><span class="lineno"> 1430</span>                                    mx_uint* num_in_args,</div>
+<div class="line"><a name="l01431"></a><span class="lineno"> 1431</span>                                    NDArrayHandle** in_args,</div>
+<div class="line"><a name="l01432"></a><span class="lineno"> 1432</span>                                    NDArrayHandle** arg_grads,</div>
+<div class="line"><a name="l01433"></a><span class="lineno"> 1433</span>                                    mx_uint* num_aux_states,</div>
+<div class="line"><a name="l01434"></a><span class="lineno"> 1434</span>                                    NDArrayHandle** aux_states,</div>
+<div class="line"><a name="l01435"></a><span class="lineno"> 1435</span>                                    ExecutorHandle shared_exec_handle,</div>
+<div class="line"><a name="l01436"></a><span class="lineno"> 1436</span>                                    ExecutorHandle* out);</div>
+<div class="line"><a name="l01440"></a><span class="lineno"> 1440</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#abae91a5d7847020e14395e078144489a">MXExecutorSetMonitorCallback</a>(ExecutorHandle handle,</div>
+<div class="line"><a name="l01441"></a><span class="lineno"> 1441</span>                                            <a class="code" href="c__api_8h.html#aa7071c027141653df82d2b72db566acd">ExecutorMonitorCallback</a> callback,</div>
+<div class="line"><a name="l01442"></a><span class="lineno"> 1442</span>                                            <span class="keywordtype">void</span>* callback_handle);</div>
+<div class="line"><a name="l01443"></a><span class="lineno"> 1443</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01444"></a><span class="lineno"> 1444</span> <span class="comment">// Part 5: IO Interface</span></div>
+<div class="line"><a name="l01445"></a><span class="lineno"> 1445</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01452"></a><span class="lineno"> 1452</span> <span class="comment"></span><a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa13fb35019b0c42dcfee185b2f9e09d0">MXListDataIters</a>(mx_uint *out_size,</div>
+<div class="line"><a name="l01453"></a><span class="lineno"> 1453</span>                               DataIterCreator **out_array);</div>
+<div class="line"><a name="l01464"></a><span class="lineno"> 1464</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a361c4a5360734e430c54a754cc581354">MXDataIterCreateIter</a>(DataIterCreator handle,</div>
+<div class="line"><a name="l01465"></a><span class="lineno"> 1465</span>                                    mx_uint num_param,</div>
+<div class="line"><a name="l01466"></a><span class="lineno"> 1466</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span> **keys,</div>
+<div class="line"><a name="l01467"></a><span class="lineno"> 1467</span>                                    <span class="keyword">const</span> <span class="keywordtype">char</span> **vals,</div>
+<div class="line"><a name="l01468"></a><span class="lineno"> 1468</span>                                    DataIterHandle *out);</div>
+<div class="line"><a name="l01480"></a><span class="lineno"> 1480</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a795614eb68a4fead826202a5b5ba9517">MXDataIterGetIterInfo</a>(DataIterCreator creator,</div>
+<div class="line"><a name="l01481"></a><span class="lineno"> 1481</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> **name,</div>
+<div class="line"><a name="l01482"></a><span class="lineno"> 1482</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> **description,</div>
+<div class="line"><a name="l01483"></a><span class="lineno"> 1483</span>                                     mx_uint *num_args,</div>
+<div class="line"><a name="l01484"></a><span class="lineno"> 1484</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> ***arg_names,</div>
+<div class="line"><a name="l01485"></a><span class="lineno"> 1485</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> ***arg_type_infos,</div>
+<div class="line"><a name="l01486"></a><span class="lineno"> 1486</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span> ***arg_descriptions);</div>
+<div class="line"><a name="l01492"></a><span class="lineno"> 1492</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a29e117922acfbc95828433e736bcc3af">MXDataIterFree</a>(DataIterHandle handle);</div>
+<div class="line"><a name="l01499"></a><span class="lineno"> 1499</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a8be0adeef4b87243a5eda12f17e64e67">MXDataIterNext</a>(DataIterHandle handle,</div>
+<div class="line"><a name="l01500"></a><span class="lineno"> 1500</span>                              <span class="keywordtype">int</span> *out);</div>
+<div class="line"><a name="l01506"></a><span class="lineno"> 1506</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a21df19f3242a68d1c536783fcf35968a">MXDataIterBeforeFirst</a>(DataIterHandle handle);</div>
+<div class="line"><a name="l01507"></a><span class="lineno"> 1507</span> </div>
+<div class="line"><a name="l01514"></a><span class="lineno"> 1514</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a0920d095f423b8bb34095406ea93a697">MXDataIterGetData</a>(DataIterHandle handle,</div>
+<div class="line"><a name="l01515"></a><span class="lineno"> 1515</span>                                 NDArrayHandle *out);</div>
+<div class="line"><a name="l01523"></a><span class="lineno"> 1523</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#af1b36a78dd74707c10d31c39c4d429b5">MXDataIterGetIndex</a>(DataIterHandle handle,</div>
+<div class="line"><a name="l01524"></a><span class="lineno"> 1524</span>                                  uint64_t **out_index,</div>
+<div class="line"><a name="l01525"></a><span class="lineno"> 1525</span>                                  uint64_t *out_size);</div>
+<div class="line"><a name="l01532"></a><span class="lineno"> 1532</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aec6d0d5055719576337687fc3702a899">MXDataIterGetPadNum</a>(DataIterHandle handle,</div>
+<div class="line"><a name="l01533"></a><span class="lineno"> 1533</span>                                   <span class="keywordtype">int</span> *pad);</div>
+<div class="line"><a name="l01534"></a><span class="lineno"> 1534</span> </div>
+<div class="line"><a name="l01541"></a><span class="lineno"> 1541</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a0c36af502d2a5746d841f50e1b0e71c0">MXDataIterGetLabel</a>(DataIterHandle handle,</div>
+<div class="line"><a name="l01542"></a><span class="lineno"> 1542</span>                                  NDArrayHandle *out);</div>
+<div class="line"><a name="l01543"></a><span class="lineno"> 1543</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01544"></a><span class="lineno"> 1544</span> <span class="comment">// Part 6: basic KVStore interface</span></div>
+<div class="line"><a name="l01545"></a><span class="lineno"> 1545</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01552"></a><span class="lineno"> 1552</span> <span class="comment"></span><a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a5c4a11b4fccf320d1af0f63b40fff58a">MXInitPSEnv</a>(mx_uint num_vars,</div>
+<div class="line"><a name="l01553"></a><span class="lineno"> 1553</span>                           <span class="keyword">const</span> <span class="keywordtype">char</span> **keys,</div>
+<div class="line"><a name="l01554"></a><span class="lineno"> 1554</span>                           <span class="keyword">const</span> <span class="keywordtype">char</span> **vals);</div>
+<div class="line"><a name="l01555"></a><span class="lineno"> 1555</span> </div>
+<div class="line"><a name="l01556"></a><span class="lineno"> 1556</span> </div>
+<div class="line"><a name="l01563"></a><span class="lineno"> 1563</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a2ef1a35e886c0addf294429e27ea9637">MXKVStoreCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span> *type,</div>
+<div class="line"><a name="l01564"></a><span class="lineno"> 1564</span>                               KVStoreHandle *out);</div>
+<div class="line"><a name="l01565"></a><span class="lineno"> 1565</span> </div>
+<div class="line"><a name="l01573"></a><span class="lineno"> 1573</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab9e3c15451a1bc1d89f83773ae1185a3">MXKVStoreSetGradientCompression</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01574"></a><span class="lineno"> 1574</span>                                               mx_uint num_params,</div>
+<div class="line"><a name="l01575"></a><span class="lineno"> 1575</span>                                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01576"></a><span class="lineno"> 1576</span>                                               <span class="keyword">const</span> <span class="keywordtype">char</span>** vals);</div>
+<div class="line"><a name="l01577"></a><span class="lineno"> 1577</span> </div>
+<div class="line"><a name="l01583"></a><span class="lineno"> 1583</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a915215184517bf663b0214fed32af6e7">MXKVStoreFree</a>(KVStoreHandle handle);</div>
+<div class="line"><a name="l01592"></a><span class="lineno"> 1592</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a475087b5464f9240d940ed4204a219b9">MXKVStoreInit</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01593"></a><span class="lineno"> 1593</span>                             mx_uint num,</div>
+<div class="line"><a name="l01594"></a><span class="lineno"> 1594</span>                             <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
+<div class="line"><a name="l01595"></a><span class="lineno"> 1595</span>                             NDArrayHandle* vals);</div>
+<div class="line"><a name="l01596"></a><span class="lineno"> 1596</span> </div>
+<div class="line"><a name="l01605"></a><span class="lineno"> 1605</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a4dc86980d7922f52970a8e3978bc87cb">MXKVStoreInitEx</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01606"></a><span class="lineno"> 1606</span>                               mx_uint num,</div>
+<div class="line"><a name="l01607"></a><span class="lineno"> 1607</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01608"></a><span class="lineno"> 1608</span>                               NDArrayHandle* vals);</div>
+<div class="line"><a name="l01609"></a><span class="lineno"> 1609</span> </div>
+<div class="line"><a name="l01619"></a><span class="lineno"> 1619</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a66c990fbb8e4bd320e5ad8d369155ad6">MXKVStorePush</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01620"></a><span class="lineno"> 1620</span>                             mx_uint num,</div>
+<div class="line"><a name="l01621"></a><span class="lineno"> 1621</span>                             <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
+<div class="line"><a name="l01622"></a><span class="lineno"> 1622</span>                             NDArrayHandle* vals,</div>
+<div class="line"><a name="l01623"></a><span class="lineno"> 1623</span>                             <span class="keywordtype">int</span> priority);</div>
+<div class="line"><a name="l01633"></a><span class="lineno"> 1633</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa59b7bf29d1a74fb1d1be6689c31913f">MXKVStorePushEx</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01634"></a><span class="lineno"> 1634</span>                               mx_uint num,</div>
+<div class="line"><a name="l01635"></a><span class="lineno"> 1635</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01636"></a><span class="lineno"> 1636</span>                               NDArrayHandle* vals,</div>
+<div class="line"><a name="l01637"></a><span class="lineno"> 1637</span>                               <span class="keywordtype">int</span> priority);</div>
+<div class="line"><a name="l01647"></a><span class="lineno"> 1647</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1230e78ca998f363192dc9b345a2442e">MXKVStorePull</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01648"></a><span class="lineno"> 1648</span>                             mx_uint num,</div>
+<div class="line"><a name="l01649"></a><span class="lineno"> 1649</span>                             <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
+<div class="line"><a name="l01650"></a><span class="lineno"> 1650</span>                             NDArrayHandle* vals,</div>
+<div class="line"><a name="l01651"></a><span class="lineno"> 1651</span>                             <span class="keywordtype">int</span> priority);</div>
+<div class="line"><a name="l01661"></a><span class="lineno"> 1661</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a25588eca6c42583151e77bcf71ffc3ac">MXKVStorePullEx</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01662"></a><span class="lineno"> 1662</span>                               mx_uint num,</div>
+<div class="line"><a name="l01663"></a><span class="lineno"> 1663</span>                               <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01664"></a><span class="lineno"> 1664</span>                               NDArrayHandle* vals,</div>
+<div class="line"><a name="l01665"></a><span class="lineno"> 1665</span>                               <span class="keywordtype">int</span> priority);</div>
+<div class="line"><a name="l01666"></a><span class="lineno"> 1666</span> </div>
+<div class="line"><a name="l01679"></a><span class="lineno"> 1679</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a11fa0057c57ca3a6b0f6e8ec2660b050">MXKVStorePullRowSparse</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01680"></a><span class="lineno"> 1680</span>                                      mx_uint num,</div>
+<div class="line"><a name="l01681"></a><span class="lineno"> 1681</span>                                      <span class="keyword">const</span> <span class="keywordtype">int</span>* keys,</div>
+<div class="line"><a name="l01682"></a><span class="lineno"> 1682</span>                                      NDArrayHandle* vals,</div>
+<div class="line"><a name="l01683"></a><span class="lineno"> 1683</span>                                      <span class="keyword">const</span> NDArrayHandle* row_ids,</div>
+<div class="line"><a name="l01684"></a><span class="lineno"> 1684</span>                                      <span class="keywordtype">int</span> priority);</div>
+<div class="line"><a name="l01697"></a><span class="lineno"> 1697</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a39d10b5d3d13635ad94bef1445306f45">MXKVStorePullRowSparseEx</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01698"></a><span class="lineno"> 1698</span>                                        mx_uint num,</div>
+<div class="line"><a name="l01699"></a><span class="lineno"> 1699</span>                                        <span class="keyword">const</span> <span class="keywordtype">char</span>** keys,</div>
+<div class="line"><a name="l01700"></a><span class="lineno"> 1700</span>                                        NDArrayHandle* vals,</div>
+<div class="line"><a name="l01701"></a><span class="lineno"> 1701</span>                                        <span class="keyword">const</span> NDArrayHandle* row_ids,</div>
+<div class="line"><a name="l01702"></a><span class="lineno"> 1702</span>                                        <span class="keywordtype">int</span> priority);</div>
+<div class="line"><a name="l01703"></a><span class="lineno"> 1703</span> </div>
+<div class="line"><a name="l01712"></a><span class="lineno"><a class="line" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b"> 1712</a></span> <span class="keyword">typedef</span> void (<a class="code" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a>)(<span class="keywordtype">int</span> key,</div>
+<div class="line"><a name="l01713"></a><span class="lineno"> 1713</span>                                 NDArrayHandle recv,</div>
+<div class="line"><a name="l01714"></a><span class="lineno"> 1714</span>                                 NDArrayHandle local,</div>
+<div class="line"><a name="l01715"></a><span class="lineno"> 1715</span>                                 <span class="keywordtype">void</span> *handle);</div>
+<div class="line"><a name="l01724"></a><span class="lineno"><a class="line" href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42"> 1724</a></span> <span class="keyword">typedef</span> void (<a class="code" href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42">MXKVStoreStrUpdater</a>)(<span class="keyword">const</span> <span class="keywordtype">char</span>* key,</div>
+<div class="line"><a name="l01725"></a><span class="lineno"> 1725</span>                                    NDArrayHandle recv,</div>
+<div class="line"><a name="l01726"></a><span class="lineno"> 1726</span>                                    NDArrayHandle local,</div>
+<div class="line"><a name="l01727"></a><span class="lineno"> 1727</span>                                    <span class="keywordtype">void</span> *handle);</div>
+<div class="line"><a name="l01735"></a><span class="lineno"> 1735</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#adbdd2035afce961837866c711af4f0ab">MXKVStoreSetUpdater</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01736"></a><span class="lineno"> 1736</span>                                   <a class="code" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a> updater,</div>
+<div class="line"><a name="l01737"></a><span class="lineno"> 1737</span>                                   <span class="keywordtype">void</span> *updater_handle);</div>
+<div class="line"><a name="l01746"></a><span class="lineno"> 1746</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a70e08b32e56ad80a3557f831f6fd3b50">MXKVStoreSetUpdaterEx</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01747"></a><span class="lineno"> 1747</span>                                     <a class="code" href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a> updater,</div>
+<div class="line"><a name="l01748"></a><span class="lineno"> 1748</span>                                     <a class="code" href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42">MXKVStoreStrUpdater</a> str_updater,</div>
+<div class="line"><a name="l01749"></a><span class="lineno"> 1749</span>                                     <span class="keywordtype">void</span> *updater_handle);</div>
+<div class="line"><a name="l01756"></a><span class="lineno"> 1756</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a9dba9ada3ed98c76fe78221013e37f07">MXKVStoreGetType</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01757"></a><span class="lineno"> 1757</span>                                <span class="keyword">const</span> <span class="keywordtype">char</span>** type);</div>
+<div class="line"><a name="l01758"></a><span class="lineno"> 1758</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01759"></a><span class="lineno"> 1759</span> <span class="comment">// Part 6: advanced KVStore for multi-machines</span></div>
+<div class="line"><a name="l01760"></a><span class="lineno"> 1760</span> <span class="comment">//--------------------------------------------</span></div>
+<div class="line"><a name="l01761"></a><span class="lineno"> 1761</span> </div>
+<div class="line"><a name="l01769"></a><span class="lineno"> 1769</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1edf063b82bdd94d1f5214056dd55144">MXKVStoreGetRank</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01770"></a><span class="lineno"> 1770</span>                                <span class="keywordtype">int</span> *ret);</div>
+<div class="line"><a name="l01771"></a><span class="lineno"> 1771</span> </div>
+<div class="line"><a name="l01781"></a><span class="lineno"> 1781</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aac41a2bd1dfcdebb920c97eab40ea07b">MXKVStoreGetGroupSize</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01782"></a><span class="lineno"> 1782</span>                                     <span class="keywordtype">int</span> *ret);</div>
+<div class="line"><a name="l01783"></a><span class="lineno"> 1783</span> </div>
+<div class="line"><a name="l01789"></a><span class="lineno"> 1789</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a67d719a0a5470f6292acb5896fbca4aa">MXKVStoreIsWorkerNode</a>(<span class="keywordtype">int</span> *ret);</div>
+<div class="line"><a name="l01790"></a><span class="lineno"> 1790</span> </div>
+<div class="line"><a name="l01791"></a><span class="lineno"> 1791</span> </div>
+<div class="line"><a name="l01797"></a><span class="lineno"> 1797</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a06bf33dff7bc03cdd7474460e515882d">MXKVStoreIsServerNode</a>(<span class="keywordtype">int</span> *ret);</div>
+<div class="line"><a name="l01798"></a><span class="lineno"> 1798</span> </div>
+<div class="line"><a name="l01799"></a><span class="lineno"> 1799</span> </div>
+<div class="line"><a name="l01805"></a><span class="lineno"> 1805</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aad401ff990cb4142070e024af7ac0781">MXKVStoreIsSchedulerNode</a>(<span class="keywordtype">int</span> *ret);</div>
+<div class="line"><a name="l01806"></a><span class="lineno"> 1806</span> </div>
+<div class="line"><a name="l01813"></a><span class="lineno"> 1813</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a96e8df6a3170c52b369954215dabe71c">MXKVStoreBarrier</a>(KVStoreHandle handle);</div>
+<div class="line"><a name="l01814"></a><span class="lineno"> 1814</span> </div>
+<div class="line"><a name="l01822"></a><span class="lineno"> 1822</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#add5b8e9bbd098e8fef5deab4c7ab2737">MXKVStoreSetBarrierBeforeExit</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01823"></a><span class="lineno"> 1823</span>                                             <span class="keyword">const</span> <span class="keywordtype">int</span> barrier_before_exit);</div>
+<div class="line"><a name="l01824"></a><span class="lineno"> 1824</span> </div>
+<div class="line"><a name="l01831"></a><span class="lineno"><a class="line" href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634"> 1831</a></span> <span class="keyword">typedef</span> void (<a class="code" href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634">MXKVStoreServerController</a>)(<span class="keywordtype">int</span> head,</div>
+<div class="line"><a name="l01832"></a><span class="lineno"> 1832</span>                                          <span class="keyword">const</span> <span class="keywordtype">char</span> *body,</div>
+<div class="line"><a name="l01833"></a><span class="lineno"> 1833</span>                                          <span class="keywordtype">void</span> *controller_handle);</div>
+<div class="line"><a name="l01834"></a><span class="lineno"> 1834</span> </div>
+<div class="line"><a name="l01843"></a><span class="lineno"> 1843</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a732c3edf14dc24784abb27fe2613a10a">MXKVStoreRunServer</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01844"></a><span class="lineno"> 1844</span>                                  <a class="code" href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634">MXKVStoreServerController</a> controller,</div>
+<div class="line"><a name="l01845"></a><span class="lineno"> 1845</span>                                  <span class="keywordtype">void</span> *controller_handle);</div>
+<div class="line"><a name="l01846"></a><span class="lineno"> 1846</span> </div>
+<div class="line"><a name="l01855"></a><span class="lineno"> 1855</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a5de45491037df7bf6b757b627100acd7">MXKVStoreSendCommmandToServers</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01856"></a><span class="lineno"> 1856</span>                                              <span class="keywordtype">int</span> cmd_id,</div>
+<div class="line"><a name="l01857"></a><span class="lineno"> 1857</span>                                              <span class="keyword">const</span> <span class="keywordtype">char</span>* cmd_body);</div>
+<div class="line"><a name="l01858"></a><span class="lineno"> 1858</span> </div>
+<div class="line"><a name="l01869"></a><span class="lineno"> 1869</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ac8b5138bcb7959cc831a97a24755e969">MXKVStoreGetNumDeadNode</a>(KVStoreHandle handle,</div>
+<div class="line"><a name="l01870"></a><span class="lineno"> 1870</span>                                       <span class="keyword">const</span> <span class="keywordtype">int</span> node_id,</div>
+<div class="line"><a name="l01871"></a><span class="lineno"> 1871</span>                                       <span class="keywordtype">int</span> *number,</div>
+<div class="line"><a name="l01872"></a><span class="lineno"> 1872</span>                                       <span class="keyword">const</span> <span class="keywordtype">int</span> timeout_sec <a class="code" href="c__api_8h.html#a2380be5ab258c3657553d0cef62936fe">DEFAULT</a>(60));</div>
+<div class="line"><a name="l01873"></a><span class="lineno"> 1873</span> </div>
+<div class="line"><a name="l01880"></a><span class="lineno"> 1880</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab2b93d730f48591b4f77f3968f6a1b98">MXRecordIOWriterCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span> *uri, RecordIOHandle *out);</div>
+<div class="line"><a name="l01881"></a><span class="lineno"> 1881</span> </div>
+<div class="line"><a name="l01887"></a><span class="lineno"> 1887</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab2a55917ffd918e334464f1b8a7a1ab9">MXRecordIOWriterFree</a>(RecordIOHandle handle);</div>
+<div class="line"><a name="l01888"></a><span class="lineno"> 1888</span> </div>
+<div class="line"><a name="l01896"></a><span class="lineno"> 1896</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a0c380b6aa00446e7cb6021c8e2df75f3">MXRecordIOWriterWriteRecord</a>(RecordIOHandle handle,</div>
+<div class="line"><a name="l01897"></a><span class="lineno"> 1897</span>                                           <span class="keyword">const</span> <span class="keywordtype">char</span> *buf, <span class="keywordtype">size_t</span> size);</div>
+<div class="line"><a name="l01898"></a><span class="lineno"> 1898</span> </div>
+<div class="line"><a name="l01905"></a><span class="lineno"> 1905</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a6cc8e9f94ec431edf8d9502f20d7b833">MXRecordIOWriterTell</a>(RecordIOHandle handle, <span class="keywordtype">size_t</span> *pos);</div>
+<div class="line"><a name="l01906"></a><span class="lineno"> 1906</span> </div>
+<div class="line"><a name="l01913"></a><span class="lineno"> 1913</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a4464ddbe13b8a4542d3f91bf055d50af">MXRecordIOReaderCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span> *uri, RecordIOHandle *out);</div>
+<div class="line"><a name="l01914"></a><span class="lineno"> 1914</span> </div>
+<div class="line"><a name="l01920"></a><span class="lineno"> 1920</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a92736f17a5d9fd58e53e621358e4ef31">MXRecordIOReaderFree</a>(RecordIOHandle handle);</div>
+<div class="line"><a name="l01921"></a><span class="lineno"> 1921</span> </div>
+<div class="line"><a name="l01929"></a><span class="lineno"> 1929</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a38e350f748967e0caa2c781a507667e4">MXRecordIOReaderReadRecord</a>(RecordIOHandle handle,</div>
+<div class="line"><a name="l01930"></a><span class="lineno"> 1930</span>                                         <span class="keywordtype">char</span> <span class="keyword">const</span> **buf, <span class="keywordtype">size_t</span> *size);</div>
+<div class="line"><a name="l01931"></a><span class="lineno"> 1931</span> </div>
+<div class="line"><a name="l01938"></a><span class="lineno"> 1938</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa992161083826ee6a434ade2790fbb31">MXRecordIOReaderSeek</a>(RecordIOHandle handle, <span class="keywordtype">size_t</span> pos);</div>
+<div class="line"><a name="l01939"></a><span class="lineno"> 1939</span> </div>
+<div class="line"><a name="l01946"></a><span class="lineno"> 1946</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a060d9a58a2e692af26fae433cecf7436">MXRecordIOReaderTell</a>(RecordIOHandle handle, <span class="keywordtype">size_t</span> *pos);</div>
+<div class="line"><a name="l01947"></a><span class="lineno"> 1947</span> </div>
+<div class="line"><a name="l01951"></a><span class="lineno"> 1951</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a87e98002ae1a925b65a8cf0773f802d3">MXRtcCreate</a>(<span class="keywordtype">char</span>* name, mx_uint num_input, mx_uint num_output,</div>
+<div class="line"><a name="l01952"></a><span class="lineno"> 1952</span>                           <span class="keywordtype">char</span>** input_names, <span class="keywordtype">char</span>** output_names,</div>
+<div class="line"><a name="l01953"></a><span class="lineno"> 1953</span>                           NDArrayHandle* inputs, NDArrayHandle* outputs,</div>
+<div class="line"><a name="l01954"></a><span class="lineno"> 1954</span>                           <span class="keywordtype">char</span>* kernel, RtcHandle *out);</div>
+<div class="line"><a name="l01955"></a><span class="lineno"> 1955</span> </div>
+<div class="line"><a name="l01959"></a><span class="lineno"> 1959</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a20f72cb4e18d5081eaf40875adcb6e98">MXRtcPush</a>(RtcHandle handle, mx_uint num_input, mx_uint num_output,</div>
+<div class="line"><a name="l01960"></a><span class="lineno"> 1960</span>                         NDArrayHandle* inputs, NDArrayHandle* outputs,</div>
+<div class="line"><a name="l01961"></a><span class="lineno"> 1961</span>                         mx_uint gridDimX,</div>
+<div class="line"><a name="l01962"></a><span class="lineno"> 1962</span>                         mx_uint gridDimY,</div>
+<div class="line"><a name="l01963"></a><span class="lineno"> 1963</span>                         mx_uint gridDimZ,</div>
+<div class="line"><a name="l01964"></a><span class="lineno"> 1964</span>                         mx_uint blockDimX,</div>
+<div class="line"><a name="l01965"></a><span class="lineno"> 1965</span>                         mx_uint blockDimY,</div>
+<div class="line"><a name="l01966"></a><span class="lineno"> 1966</span>                         mx_uint blockDimZ);</div>
+<div class="line"><a name="l01967"></a><span class="lineno"> 1967</span> </div>
+<div class="line"><a name="l01971"></a><span class="lineno"> 1971</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a13819b19ab3cdd91566b1d5cf8bc6b0a">MXRtcFree</a>(RtcHandle handle);</div>
+<div class="line"><a name="l01972"></a><span class="lineno"> 1972</span> <span class="comment">/*</span></div>
+<div class="line"><a name="l01973"></a><span class="lineno"> 1973</span> <span class="comment"> * \brief register custom operators from frontend.</span></div>
+<div class="line"><a name="l01974"></a><span class="lineno"> 1974</span> <span class="comment"> * \param op_type name of custom op</span></div>
+<div class="line"><a name="l01975"></a><span class="lineno"> 1975</span> <span class="comment"> * \param creator</span></div>
+<div class="line"><a name="l01976"></a><span class="lineno"> 1976</span> <span class="comment"> */</span></div>
+<div class="line"><a name="l01977"></a><span class="lineno"> 1977</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a92233cd9477ce12458bbd2b3e628dec9">MXCustomOpRegister</a>(<span class="keyword">const</span> <span class="keywordtype">char</span>* op_type, <a class="code" href="c__api_8h.html#a3a7fd875ee6aad3f20981764626a6c7b">CustomOpPropCreator</a> creator);</div>
 <div class="line"><a name="l01978"></a><span class="lineno"> 1978</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l01979"></a><span class="lineno"> 1979</span> <span class="comment"> * \brief create cuda rtc module</span></div>
-<div class="line"><a name="l01980"></a><span class="lineno"> 1980</span> <span class="comment"> * \param source cuda source code</span></div>
-<div class="line"><a name="l01981"></a><span class="lineno"> 1981</span> <span class="comment"> * \param num_options number of compiler flags</span></div>
-<div class="line"><a name="l01982"></a><span class="lineno"> 1982</span> <span class="comment"> * \param options compiler flags</span></div>
-<div class="line"><a name="l01983"></a><span class="lineno"> 1983</span> <span class="comment"> * \param num_exports number of exported function names</span></div>
-<div class="line"><a name="l01984"></a><span class="lineno"> 1984</span> <span class="comment"> * \param exported function names</span></div>
-<div class="line"><a name="l01985"></a><span class="lineno"> 1985</span> <span class="comment"> * \param out handle to created module</span></div>
-<div class="line"><a name="l01986"></a><span class="lineno"> 1986</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l01987"></a><span class="lineno"> 1987</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a19d9053892c51c0b89f0e7698ff9c24c">MXRtcCudaModuleCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span>* source, <span class="keywordtype">int</span> num_options,</div>
-<div class="line"><a name="l01988"></a><span class="lineno"> 1988</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span>** options, <span class="keywordtype">int</span> num_exports,</div>
-<div class="line"><a name="l01989"></a><span class="lineno"> 1989</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span>** exports, CudaModuleHandle *out);</div>
-<div class="line"><a name="l01990"></a><span class="lineno"> 1990</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l01991"></a><span class="lineno"> 1991</span> <span class="comment"> * \brief delete cuda rtc module</span></div>
-<div class="line"><a name="l01992"></a><span class="lineno"> 1992</span> <span class="comment"> * \param handle handle to cuda module</span></div>
-<div class="line"><a name="l01993"></a><span class="lineno"> 1993</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l01994"></a><span class="lineno"> 1994</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1f93e2016c7b3cd869a5741201b5508c">MXRtcCudaModuleFree</a>(CudaModuleHandle handle);</div>
-<div class="line"><a name="l01995"></a><span class="lineno"> 1995</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l01996"></a><span class="lineno"> 1996</span> <span class="comment"> * \brief get kernel from module</span></div>
-<div class="line"><a name="l01997"></a><span class="lineno"> 1997</span> <span class="comment"> * \param handle handle to cuda module</span></div>
-<div class="line"><a name="l01998"></a><span class="lineno"> 1998</span> <span class="comment"> * \param name name of kernel function</span></div>
-<div class="line"><a name="l01999"></a><span class="lineno"> 1999</span> <span class="comment"> * \param num_args number of arguments</span></div>
-<div class="line"><a name="l02000"></a><span class="lineno"> 2000</span> <span class="comment"> * \param is_ndarray whether argument is ndarray</span></div>
-<div class="line"><a name="l02001"></a><span class="lineno"> 2001</span> <span class="comment"> * \param is_const whether argument is constant</span></div>
-<div class="line"><a name="l02002"></a><span class="lineno"> 2002</span> <span class="comment"> * \param arg_types data type of arguments</span></div>
-<div class="line"><a name="l02003"></a><span class="lineno"> 2003</span> <span class="comment"> * \param out created kernel</span></div>
+<div class="line"><a name="l01979"></a><span class="lineno"> 1979</span> <span class="comment"> * \brief record custom function for backward later.</span></div>
+<div class="line"><a name="l01980"></a><span class="lineno"> 1980</span> <span class="comment"> * \param num_inputs number of input NDArrays.</span></div>
+<div class="line"><a name="l01981"></a><span class="lineno"> 1981</span> <span class="comment"> * \param inputs handle to input NDArrays.</span></div>
+<div class="line"><a name="l01982"></a><span class="lineno"> 1982</span> <span class="comment"> * \param num_outputs number of output NDArrays.</span></div>
+<div class="line"><a name="l01983"></a><span class="lineno"> 1983</span> <span class="comment"> * \param outputs handle to output NDArrays.</span></div>
+<div class="line"><a name="l01984"></a><span class="lineno"> 1984</span> <span class="comment"> * \param callbacks callbacks for backward function.</span></div>
+<div class="line"><a name="l01985"></a><span class="lineno"> 1985</span> <span class="comment"> */</span></div>
+<div class="line"><a name="l01986"></a><span class="lineno"> 1986</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ae1f7686cd08133d865ed6b9ea23eb31b">MXCustomFunctionRecord</a>(<span class="keywordtype">int</span> num_inputs, NDArrayHandle *inputs,</div>
+<div class="line"><a name="l01987"></a><span class="lineno"> 1987</span>                                      <span class="keywordtype">int</span> num_outputs, NDArrayHandle *outputs,</div>
+<div class="line"><a name="l01988"></a><span class="lineno"> 1988</span>                                      <span class="keyword">struct</span> <a class="code" href="structMXCallbackList.html">MXCallbackList</a> *<a class="code" href="structMXCallbackList.html#ad56325e57f5fffc2b920eb3f8f378199">callbacks</a>);</div>
+<div class="line"><a name="l01989"></a><span class="lineno"> 1989</span> <span class="comment">/*</span></div>
+<div class="line"><a name="l01990"></a><span class="lineno"> 1990</span> <span class="comment"> * \brief create cuda rtc module</span></div>
+<div class="line"><a name="l01991"></a><span class="lineno"> 1991</span> <span class="comment"> * \param source cuda source code</span></div>
+<div class="line"><a name="l01992"></a><span class="lineno"> 1992</span> <span class="comment"> * \param num_options number of compiler flags</span></div>
+<div class="line"><a name="l01993"></a><span class="lineno"> 1993</span> <span class="comment"> * \param options compiler flags</span></div>
+<div class="line"><a name="l01994"></a><span class="lineno"> 1994</span> <span class="comment"> * \param num_exports number of exported function names</span></div>
+<div class="line"><a name="l01995"></a><span class="lineno"> 1995</span> <span class="comment"> * \param exported function names</span></div>
+<div class="line"><a name="l01996"></a><span class="lineno"> 1996</span> <span class="comment"> * \param out handle to created module</span></div>
+<div class="line"><a name="l01997"></a><span class="lineno"> 1997</span> <span class="comment"> */</span></div>
+<div class="line"><a name="l01998"></a><span class="lineno"> 1998</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a19d9053892c51c0b89f0e7698ff9c24c">MXRtcCudaModuleCreate</a>(<span class="keyword">const</span> <span class="keywordtype">char</span>* source, <span class="keywordtype">int</span> num_options,</div>
+<div class="line"><a name="l01999"></a><span class="lineno"> 1999</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span>** options, <span class="keywordtype">int</span> num_exports,</div>
+<div class="line"><a name="l02000"></a><span class="lineno"> 2000</span>                                     <span class="keyword">const</span> <span class="keywordtype">char</span>** exports, CudaModuleHandle *out);</div>
+<div class="line"><a name="l02001"></a><span class="lineno"> 2001</span> <span class="comment">/*</span></div>
+<div class="line"><a name="l02002"></a><span class="lineno"> 2002</span> <span class="comment"> * \brief delete cuda rtc module</span></div>
+<div class="line"><a name="l02003"></a><span class="lineno"> 2003</span> <span class="comment"> * \param handle handle to cuda module</span></div>
 <div class="line"><a name="l02004"></a><span class="lineno"> 2004</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l02005"></a><span class="lineno"> 2005</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa58e90c31701d59826a76bf0efaf4db5">MXRtcCudaKernelCreate</a>(CudaModuleHandle handle, <span class="keyword">const</span> <span class="keywordtype">char</span>* name,</div>
-<div class="line"><a name="l02006"></a><span class="lineno"> 2006</span>                                     <span class="keywordtype">int</span> num_args, <span class="keywordtype">int</span>* is_ndarray, <span class="keywordtype">int</span>* is_const,</div>
-<div class="line"><a name="l02007"></a><span class="lineno"> 2007</span>                                     <span class="keywordtype">int</span>* arg_types, CudaKernelHandle *out);</div>
-<div class="line"><a name="l02008"></a><span class="lineno"> 2008</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l02009"></a><span class="lineno"> 2009</span> <span class="comment"> * \brief delete kernel</span></div>
-<div class="line"><a name="l02010"></a><span class="lineno"> 2010</span> <span class="comment"> * \param handle handle to previously created kernel</span></div>
-<div class="line"><a name="l02011"></a><span class="lineno"> 2011</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l02012"></a><span class="lineno"> 2012</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a025928c86657eb708e45103b8e3f02d1">MXRtcCudaKernelFree</a>(CudaKernelHandle handle);</div>
-<div class="line"><a name="l02013"></a><span class="lineno"> 2013</span> <span class="comment">/*</span></div>
-<div class="line"><a name="l02014"></a><span class="lineno"> 2014</span> <span class="comment"> * \brief launch cuda kernel</span></div>
-<div class="line"><a name="l02015"></a><span class="lineno"> 2015</span> <span class="comment"> * \param handle handle to kernel</span></div>
-<div class="line"><a name="l02016"></a><span class="lineno"> 2016</span> <span class="comment"> * \param dev_id (GPU) device id</span></div>
-<div class="line"><a name="l02017"></a><span class="lineno"> 2017</span> <span class="comment"> * \param args pointer to arguments</span></div>
-<div class="line"><a name="l02018"></a><span class="lineno"> 2018</span> <span class="comment"> * \param grid_dim_x grid dimension x</span></div>
-<div class="line"><a name="l02019"></a><span class="lineno"> 2019</span> <span class="comment"> * \param grid_dim_y grid dimension y</span></div>
-<div class="line"><a name="l02020"></a><span class="lineno"> 2020</span> <span class="comment"> * \param grid_dim_z grid dimension z</span></div>
-<div class="line"><a name="l02021"></a><span class="lineno"> 2021</span> <span class="comment"> * \param block_dim_x block dimension x</span></div>
-<div class="line"><a name="l02022"></a><span class="lineno"> 2022</span> <span class="comment"> * \param block_dim_y block dimension y</span></div>
-<div class="line"><a name="l02023"></a><span class="lineno"> 2023</span> <span class="comment"> * \param block_dim_z block dimension z</span></div>
-<div class="line"><a name="l02024"></a><span class="lineno"> 2024</span> <span class="comment"> * \param shared_mem size of dynamically allocated shared memory</span></div>
-<div class="line"><a name="l02025"></a><span class="lineno"> 2025</span> <span class="comment"> */</span></div>
-<div class="line"><a name="l02026"></a><span class="lineno"> 2026</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab538e41bfc6c0ce3185676dd028f8ccb">MXRtcCudaKernelCall</a>(CudaKernelHandle handle, <span class="keywordtype">int</span> dev_id, <span class="keywordtype">void</span>** args,</div>
-<div class="line"><a name="l02027"></a><span class="lineno"> 2027</span>                                   mx_uint grid_dim_x, mx_uint grid_dim_y,</div>
-<div class="line"><a name="l02028"></a><span class="lineno"> 2028</span>                                   mx_uint grid_dim_z, mx_uint block_dim_x,</div>
-<div class="line"><a name="l02029"></a><span class="lineno"> 2029</span>                                   mx_uint block_dim_y, mx_uint block_dim_z,</div>
-<div class="line"><a name="l02030"></a><span class="lineno"> 2030</span>                                   mx_uint shared_mem);</div>
-<div class="line"><a name="l02037"></a><span class="lineno"> 2037</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a3958af27157b6bacea10e87440180669">MXNDArrayGetSharedMemHandle</a>(NDArrayHandle handle, <span class="keywordtype">int</span>* shared_pid,</div>
-<div class="line"><a name="l02038"></a><span class="lineno"> 2038</span>                                           <span class="keywordtype">int</span>* shared_id);</div>
-<div class="line"><a name="l02048"></a><span class="lineno"> 2048</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a260571f9748de6eb097ea6d0f0a8b915">MXNDArrayCreateFromSharedMem</a>(<span class="keywordtype">int</span> shared_pid, <span class="keywordtype">int</span> shared_id, <span class="keyword">const</span> mx_uint *shape,</div>
-<div class="line"><a name="l02049"></a><span class="lineno"> 2049</span>                                            mx_uint ndim, <span class="keywordtype">int</span> dtype, NDArrayHandle *out);</div>
-<div class="line"><a name="l02050"></a><span class="lineno"> 2050</span> </div>
-<div class="line"><a name="l02051"></a><span class="lineno"> 2051</span> </div>
-<div class="line"><a name="l02052"></a><span class="lineno"> 2052</span> <span class="preprocessor">#ifdef __cplusplus</span></div>
-<div class="line"><a name="l02053"></a><span class="lineno"> 2053</span> <span class="preprocessor"></span>}</div>
-<div class="line"><a name="l02054"></a><span class="lineno"> 2054</span> <span class="preprocessor">#endif  // __cplusplus</span></div>
-<div class="line"><a name="l02055"></a><span class="lineno"> 2055</span> <span class="preprocessor"></span></div>
-<div class="line"><a name="l02056"></a><span class="lineno"> 2056</span> <span class="preprocessor">#endif  // MXNET_C_API_H_</span></div>
+<div class="line"><a name="l02005"></a><span class="lineno"> 2005</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a1f93e2016c7b3cd869a5741201b5508c">MXRtcCudaModuleFree</a>(CudaModuleHandle handle);</div>
+<div class="line"><a name="l02006"></a><span class="lineno"> 2006</span> <span class="comment">/*</span></div>
+<div class="line"><a name="l02007"></a><span class="lineno"> 2007</span> <span class="comment"> * \brief get kernel from module</span></div>
+<div class="line"><a name="l02008"></a><span class="lineno"> 2008</span> <span class="comment"> * \param handle handle to cuda module</span></div>
+<div class="line"><a name="l02009"></a><span class="lineno"> 2009</span> <span class="comment"> * \param name name of kernel function</span></div>
+<div class="line"><a name="l02010"></a><span class="lineno"> 2010</span> <span class="comment"> * \param num_args number of arguments</span></div>
+<div class="line"><a name="l02011"></a><span class="lineno"> 2011</span> <span class="comment"> * \param is_ndarray whether argument is ndarray</span></div>
+<div class="line"><a name="l02012"></a><span class="lineno"> 2012</span> <span class="comment"> * \param is_const whether argument is constant</span></div>
+<div class="line"><a name="l02013"></a><span class="lineno"> 2013</span> <span class="comment"> * \param arg_types data type of arguments</span></div>
+<div class="line"><a name="l02014"></a><span class="lineno"> 2014</span> <span class="comment"> * \param out created kernel</span></div>
+<div class="line"><a name="l02015"></a><span class="lineno"> 2015</span> <span class="comment"> */</span></div>
+<div class="line"><a name="l02016"></a><span class="lineno"> 2016</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#aa58e90c31701d59826a76bf0efaf4db5">MXRtcCudaKernelCreate</a>(CudaModuleHandle handle, <span class="keyword">const</span> <span class="keywordtype">char</span>* name,</div>
+<div class="line"><a name="l02017"></a><span class="lineno"> 2017</span>                                     <span class="keywordtype">int</span> num_args, <span class="keywordtype">int</span>* is_ndarray, <span class="keywordtype">int</span>* is_const,</div>
+<div class="line"><a name="l02018"></a><span class="lineno"> 2018</span>                                     <span class="keywordtype">int</span>* arg_types, CudaKernelHandle *out);</div>
+<div class="line"><a name="l02019"></a><span class="lineno"> 2019</span> <span class="comment">/*</span></div>
+<div class="line"><a name="l02020"></a><span class="lineno"> 2020</span> <span class="comment"> * \brief delete kernel</span></div>
+<div class="line"><a name="l02021"></a><span class="lineno"> 2021</span> <span class="comment"> * \param handle handle to previously created kernel</span></div>
+<div class="line"><a name="l02022"></a><span class="lineno"> 2022</span> <span class="comment"> */</span></div>
+<div class="line"><a name="l02023"></a><span class="lineno"> 2023</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a025928c86657eb708e45103b8e3f02d1">MXRtcCudaKernelFree</a>(CudaKernelHandle handle);</div>
+<div class="line"><a name="l02024"></a><span class="lineno"> 2024</span> <span class="comment">/*</span></div>
+<div class="line"><a name="l02025"></a><span class="lineno"> 2025</span> <span class="comment"> * \brief launch cuda kernel</span></div>
+<div class="line"><a name="l02026"></a><span class="lineno"> 2026</span> <span class="comment"> * \param handle handle to kernel</span></div>
+<div class="line"><a name="l02027"></a><span class="lineno"> 2027</span> <span class="comment"> * \param dev_id (GPU) device id</span></div>
+<div class="line"><a name="l02028"></a><span class="lineno"> 2028</span> <span class="comment"> * \param args pointer to arguments</span></div>
+<div class="line"><a name="l02029"></a><span class="lineno"> 2029</span> <span class="comment"> * \param grid_dim_x grid dimension x</span></div>
+<div class="line"><a name="l02030"></a><span class="lineno"> 2030</span> <span class="comment"> * \param grid_dim_y grid dimension y</span></div>
+<div class="line"><a name="l02031"></a><span class="lineno"> 2031</span> <span class="comment"> * \param grid_dim_z grid dimension z</span></div>
+<div class="line"><a name="l02032"></a><span class="lineno"> 2032</span> <span class="comment"> * \param block_dim_x block dimension x</span></div>
+<div class="line"><a name="l02033"></a><span class="lineno"> 2033</span> <span class="comment"> * \param block_dim_y block dimension y</span></div>
+<div class="line"><a name="l02034"></a><span class="lineno"> 2034</span> <span class="comment"> * \param block_dim_z block dimension z</span></div>
+<div class="line"><a name="l02035"></a><span class="lineno"> 2035</span> <span class="comment"> * \param shared_mem size of dynamically allocated shared memory</span></div>
+<div class="line"><a name="l02036"></a><span class="lineno"> 2036</span> <span class="comment"> */</span></div>
+<div class="line"><a name="l02037"></a><span class="lineno"> 2037</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#ab538e41bfc6c0ce3185676dd028f8ccb">MXRtcCudaKernelCall</a>(CudaKernelHandle handle, <span class="keywordtype">int</span> dev_id, <span class="keywordtype">void</span>** args,</div>
+<div class="line"><a name="l02038"></a><span class="lineno"> 2038</span>                                   mx_uint grid_dim_x, mx_uint grid_dim_y,</div>
+<div class="line"><a name="l02039"></a><span class="lineno"> 2039</span>                                   mx_uint grid_dim_z, mx_uint block_dim_x,</div>
+<div class="line"><a name="l02040"></a><span class="lineno"> 2040</span>                                   mx_uint block_dim_y, mx_uint block_dim_z,</div>
+<div class="line"><a name="l02041"></a><span class="lineno"> 2041</span>                                   mx_uint shared_mem);</div>
+<div class="line"><a name="l02048"></a><span class="lineno"> 2048</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a3958af27157b6bacea10e87440180669">MXNDArrayGetSharedMemHandle</a>(NDArrayHandle handle, <span class="keywordtype">int</span>* shared_pid,</div>
+<div class="line"><a name="l02049"></a><span class="lineno"> 2049</span>                                           <span class="keywordtype">int</span>* shared_id);</div>
+<div class="line"><a name="l02059"></a><span class="lineno"> 2059</span> <a class="code" href="c__api_8h.html#a5b0e47dde55f9b2f58b99fab92d1e0ac">MXNET_DLL</a> <span class="keywordtype">int</span> <a class="code" href="c__api_8h.html#a260571f9748de6eb097ea6d0f0a8b915">MXNDArrayCreateFromSharedMem</a>(<span class="keywordtype">int</span> shared_pid, <span class="keywordtype">int</span> shared_id, <span class="keyword">const</span> mx_uint *shape,</div>
+<div class="line"><a name="l02060"></a><span class="lineno"> 2060</span>                                            mx_uint ndim, <span class="keywordtype">int</span> dtype, NDArrayHandle *out);</div>
+<div class="line"><a name="l02061"></a><span class="lineno"> 2061</span> </div>
+<div class="line"><a name="l02062"></a><span class="lineno"> 2062</span> </div>
+<div class="line"><a name="l02063"></a><span class="lineno"> 2063</span> <span class="preprocessor">#ifdef __cplusplus</span></div>
+<div class="line"><a name="l02064"></a><span class="lineno"> 2064</span> <span class="preprocessor"></span>}</div>
+<div class="line"><a name="l02065"></a><span class="lineno"> 2065</span> <span class="preprocessor">#endif  // __cplusplus</span></div>
+<div class="line"><a name="l02066"></a><span class="lineno"> 2066</span> <span class="preprocessor"></span></div>
+<div class="line"><a name="l02067"></a><span class="lineno"> 2067</span> <span class="preprocessor">#endif  // MXNET_C_API_H_</span></div>
 <div class="ttc" id="c__api_8h_html_a5de45491037df7bf6b757b627100acd7"><div class="ttname"><a href="c__api_8h.html#a5de45491037df7bf6b757b627100acd7">MXKVStoreSendCommmandToServers</a></div><div class="ttdeci">MXNET_DLL int MXKVStoreSendCommmandToServers(KVStoreHandle handle, int cmd_id, const char *cmd_body)</div></div>
 <div class="ttc" id="structNDArrayOpInfo_html_ad6bb579606e6bd83569d035fa92a857c"><div class="ttname"><a href="structNDArrayOpInfo.html#ad6bb579606e6bd83569d035fa92a857c">NDArrayOpInfo::list_arguments</a></div><div class="ttdeci">bool(* list_arguments)(char ***, void *)</div><div class="ttdef"><b>Definition:</b> c_api.h:116</div></div>
 <div class="ttc" id="c__api_8h_html_a8e2c90cfa6c52a012fd3f23ffd8541f2"><div class="ttname"><a href="c__api_8h.html#a8e2c90cfa6c52a012fd3f23ffd8541f2">MXNDArrayDetach</a></div><div class="ttdeci">MXNET_DLL int MXNDArrayDetach(NDArrayHandle handle, NDArrayHandle *out)</div><div class="ttdoc">detach and ndarray from computation graph by clearing entry_ </div></div>
@@ -951,6 +956,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="ttc" id="c__api_8h_html_a5f8937c0b27a527c8b62adb7a4704d83"><div class="ttname"><a href="c__api_8h.html#a5f8937c0b27a527c8b62adb7a4704d83">MXNDArrayGetAuxType</a></div><div class="ttdeci">MXNET_DLL int MXNDArrayGetAuxType(NDArrayHandle handle, mx_uint i, int *out_type)</div><div class="ttdoc">get the type of the ith aux data in NDArray </div></div>
 <div class="ttc" id="c__api_8h_html_adbdd2035afce961837866c711af4f0ab"><div class="ttname"><a href="c__api_8h.html#adbdd2035afce961837866c711af4f0ab">MXKVStoreSetUpdater</a></div><div class="ttdeci">MXNET_DLL int MXKVStoreSetUpdater(KVStoreHandle handle, MXKVStoreUpdater updater, void *updater_handle)</div><div class="ttdoc">register a push updater </div></div>
 <div class="ttc" id="c__api_8h_html_a96e8df6a3170c52b369954215dabe71c"><div class="ttname"><a href="c__api_8h.html#a96e8df6a3170c52b369954215dabe71c">MXKVStoreBarrier</a></div><div class="ttdeci">MXNET_DLL int MXKVStoreBarrier(KVStoreHandle handle)</div><div class="ttdoc">global barrier among all worker machines </div></div>
+<div class="ttc" id="c__api_8h_html_a970b94e3dfb72a2a76065149180c4f1f"><div class="ttname"><a href="c__api_8h.html#a970b94e3dfb72a2a76065149180c4f1f">MXSymbolGetNumOutputs</a></div><div class="ttdeci">MXNET_DLL int MXSymbolGetNumOutputs(SymbolHandle symbol, mx_uint *output_count)</div><div class="ttdoc">Get number of outputs of the symbol. </div></div>
 <div class="ttc" id="c__api_8h_html_abce84fe05f55709d643bcfd0a4e4620b"><div class="ttname"><a href="c__api_8h.html#abce84fe05f55709d643bcfd0a4e4620b">DataIterHandle</a></div><div class="ttdeci">void * DataIterHandle</div><div class="ttdoc">handle to a DataIterator </div><div class="ttdef"><b>Definition:</b> c_api.h:81</div></div>
 <div class="ttc" id="c__api_8h_html_a6eb35d17154ef093856e60e416d354cc"><div class="ttname"><a href="c__api_8h.html#a6eb35d17154ef093856e60e416d354cc">MXSymbolListAtomicSymbolCreators</a></div><div class="ttdeci">MXNET_DLL int MXSymbolListAtomicSymbolCreators(mx_uint *out_size, AtomicSymbolCreator **out_array)</div><div class="ttdoc">list all the available AtomicSymbolEntry </div></div>
 <div class="ttc" id="c__api_8h_html_aa2395b69772d66cb5c6c4cbe47749792a2d5d86c1a9bac7de4ceb1c641a4dce4a"><div class="ttname"><a href="c__api_8h.html#aa2395b69772d66cb5c6c4cbe47749792a2d5d86c1a9bac7de4ceb1c641a4dce4a">kCustomOpDelete</a></div><div class="ttdef"><b>Definition:</b> c_api.h:137</div></div>
@@ -1047,7 +1053,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="ttc" id="c__api_8h_html_a21df19f3242a68d1c536783fcf35968a"><div class="ttname"><a href="c__api_8h.html#a21df19f3242a68d1c536783fcf35968a">MXDataIterBeforeFirst</a></div><div class="ttdeci">MXNET_DLL int MXDataIterBeforeFirst(DataIterHandle handle)</div><div class="ttdoc">Call iterator.Reset. </div></div>
 <div class="ttc" id="c__api_8h_html_a664c85b47c69b81be3b9ea1dfcb72a94"><div class="ttname"><a href="c__api_8h.html#a664c85b47c69b81be3b9ea1dfcb72a94">MXSymbolInferShapePartial</a></div><div class="ttdeci">MXNET_DLL int MXSymbolInferShapePartial(SymbolHandle sym, mx_uint num_args, const char **keys, const mx_uint *arg_ind_ptr, const mx_uint *arg_shape_data, mx_uint *in_shape_size, const mx_uint **in_shape_ndim, const mx_uint ***in_shape_data, mx_uint *out_shape_size, const mx_uint **out_s [...]
 <div class="ttc" id="c__api_8h_html_aedf1da726aaacb87a5d959ed83f03f41"><div class="ttname"><a href="c__api_8h.html#aedf1da726aaacb87a5d959ed83f03f41">MXSymbolListOutputs</a></div><div class="ttdeci">MXNET_DLL int MXSymbolListOutputs(SymbolHandle symbol, mx_uint *out_size, const char ***out_str_array)</div><div class="ttdoc">List returns in the symbol. </div></div>
-<div class="ttc" id="c__api_8h_html_af59143cc01b560bb13f25a289b83f37b"><div class="ttname"><a href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a></div><div class="ttdeci">void( MXKVStoreUpdater)(int key, NDArrayHandle recv, NDArrayHandle local, void *handle)</div><div class="ttdoc">user-defined updater for the kvstore It's this updater's responsibility to delete recv and local ...</div><div class="ttdef"><b>Definition:</b> c_api.h:1701</div></div>
+<div class="ttc" id="c__api_8h_html_af59143cc01b560bb13f25a289b83f37b"><div class="ttname"><a href="c__api_8h.html#af59143cc01b560bb13f25a289b83f37b">MXKVStoreUpdater</a></div><div class="ttdeci">void( MXKVStoreUpdater)(int key, NDArrayHandle recv, NDArrayHandle local, void *handle)</div><div class="ttdoc">user-defined updater for the kvstore It's this updater's responsibility to delete recv and local ...</div><div class="ttdef"><b>Definition:</b> c_api.h:1712</div></div>
 <div class="ttc" id="c__api_8h_html_aa13fb35019b0c42dcfee185b2f9e09d0"><div class="ttname"><a href="c__api_8h.html#aa13fb35019b0c42dcfee185b2f9e09d0">MXListDataIters</a></div><div class="ttdeci">MXNET_DLL int MXListDataIters(mx_uint *out_size, DataIterCreator **out_array)</div><div class="ttdoc">List all the available iterator entries. </div></div>
 <div class="ttc" id="c__api_8h_html_a2035651f4392d249d1b904d5eb0c3406"><div class="ttname"><a href="c__api_8h.html#a2035651f4392d249d1b904d5eb0c3406">MXNDArrayGetShape</a></div><div class="ttdeci">MXNET_DLL int MXNDArrayGetShape(NDArrayHandle handle, mx_uint *out_dim, const mx_uint **out_pdata)</div><div class="ttdoc">get the shape of the array </div></div>
 <div class="ttc" id="c__api_8h_html_aec6d0d5055719576337687fc3702a899"><div class="ttname"><a href="c__api_8h.html#aec6d0d5055719576337687fc3702a899">MXDataIterGetPadNum</a></div><div class="ttdeci">MXNET_DLL int MXDataIterGetPadNum(DataIterHandle handle, int *pad)</div><div class="ttdoc">Get the padding number in current data batch. </div></div>
@@ -1061,7 +1067,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="ttc" id="c__api_8h_html_a69cfaa5de4b9d87070d4823700e64a1d"><div class="ttname"><a href="c__api_8h.html#a69cfaa5de4b9d87070d4823700e64a1d">MXNDArrayGetDType</a></div><div class="ttdeci">MXNET_DLL int MXNDArrayGetDType(NDArrayHandle handle, int *out_dtype)</div><div class="ttdoc">get the type of the data in NDArray </div></div>
 <div class="ttc" id="c__api_8h_html_ac0ccb3977d0f8a7ae076bd55a5f15115"><div class="ttname"><a href="c__api_8h.html#ac0ccb3977d0f8a7ae076bd55a5f15115">MXAutogradIsTraining</a></div><div class="ttdeci">MXNET_DLL int MXAutogradIsTraining(bool *curr)</div><div class="ttdoc">get whether training mode is on </div></div>
 <div class="ttc" id="c__api_8h_html_ad14437f7eeba78c360f9717c6b5af177"><div class="ttname"><a href="c__api_8h.html#ad14437f7eeba78c360f9717c6b5af177">MXAutogradBackwardEx</a></div><div class="ttdeci">MXNET_DLL int MXAutogradBackwardEx(mx_uint num_output, NDArrayHandle *output_handles, NDArrayHandle *ograd_handles, mx_uint num_variables, NDArrayHandle *var_handles, int retain_graph, int create_graph, int is_train, NDArrayHandle **grad_handles, int **grad_stypes)</div><div class="ttdoc">co [...]
-<div class="ttc" id="c__api_8h_html_a38e5829512ef685f1dd1b1d1fac93c42"><div class="ttname"><a href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42">MXKVStoreStrUpdater</a></div><div class="ttdeci">void( MXKVStoreStrUpdater)(const char *key, NDArrayHandle recv, NDArrayHandle local, void *handle)</div><div class="ttdoc">user-defined updater for the kvstore with string keys It's this updater's responsibility to delete re...</div><div class="ttdef"><b>Definition:</b> c_api.h:1713</div></div>
+<div class="ttc" id="c__api_8h_html_a38e5829512ef685f1dd1b1d1fac93c42"><div class="ttname"><a href="c__api_8h.html#a38e5829512ef685f1dd1b1d1fac93c42">MXKVStoreStrUpdater</a></div><div class="ttdeci">void( MXKVStoreStrUpdater)(const char *key, NDArrayHandle recv, NDArrayHandle local, void *handle)</div><div class="ttdoc">user-defined updater for the kvstore with string keys It's this updater's responsibility to delete re...</div><div class="ttdef"><b>Definition:</b> c_api.h:1724</div></div>
 <div class="ttc" id="c__api_8h_html_a717cd8414c58e4069503725ba4dfcb33a215bf764504a913342cb45568f350757"><div class="ttname"><a href="c__api_8h.html#a717cd8414c58e4069503725ba4dfcb33a215bf764504a913342cb45568f350757">kCustomFunctionBackward</a></div><div class="ttdef"><b>Definition:</b> c_api.h:175</div></div>
 <div class="ttc" id="c__api_8h_html_a13816872b73d6c474686d985747e8c65"><div class="ttname"><a href="c__api_8h.html#a13816872b73d6c474686d985747e8c65">MXSymbolGetInternals</a></div><div class="ttdeci">MXNET_DLL int MXSymbolGetInternals(SymbolHandle symbol, SymbolHandle *out)</div><div class="ttdoc">Get a symbol that contains all the internals. </div></div>
 <div class="ttc" id="c__api_8h_html_a47a7255026a8f13672c3717896fdb356"><div class="ttname"><a href="c__api_8h.html#a47a7255026a8f13672c3717896fdb356">MXNDArrayGetDataNDArray</a></div><div class="ttdeci">MXNET_DLL int MXNDArrayGetDataNDArray(NDArrayHandle handle, NDArrayHandle *out)</div><div class="ttdoc">Get a deep copy of the data blob in the form of an NDArray of default storage type. This function blocks. Do not use it in performance critical code. </div></div>
@@ -1144,7 +1150,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="ttc" id="c__api_8h_html_a638b8ad3ae66ff77d17bd3ee504f12cc"><div class="ttname"><a href="c__api_8h.html#a638b8ad3ae66ff77d17bd3ee504f12cc">MXAutogradGetSymbol</a></div><div class="ttdeci">MXNET_DLL int MXAutogradGetSymbol(NDArrayHandle handle, SymbolHandle *out)</div></div>
 <div class="ttc" id="c__api_8h_html_ad1646370458f8a3ddb9e4f3365c5e510"><div class="ttname"><a href="c__api_8h.html#ad1646370458f8a3ddb9e4f3365c5e510">MXExecutorBindX</a></div><div class="ttdeci">MXNET_DLL int MXExecutorBindX(SymbolHandle symbol_handle, int dev_type, int dev_id, mx_uint num_map_keys, const char **map_keys, const int *map_dev_types, const int *map_dev_ids, mx_uint len, NDArrayHandle *in_args, NDArrayHandle *arg_grad_store, mx_uint *grad_req_type, mx_uint aux_states_len, ND [...]
 <div class="ttc" id="structNativeOpInfo_html_a32ec41926eee694c1bfb8e8b5697a146"><div class="ttname"><a href="structNativeOpInfo.html#a32ec41926eee694c1bfb8e8b5697a146">NativeOpInfo::p_list_outputs</a></div><div class="ttdeci">void * p_list_outputs</div><div class="ttdef"><b>Definition:</b> c_api.h:107</div></div>
-<div class="ttc" id="c__api_8h_html_a31f639b22167ac51b67381b16dfd5634"><div class="ttname"><a href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634">MXKVStoreServerController</a></div><div class="ttdeci">void( MXKVStoreServerController)(int head, const char *body, void *controller_handle)</div><div class="ttdoc">the prototype of a server controller </div><div class="ttdef"><b>Definition:</b> c_api.h:1820</div></div>
+<div class="ttc" id="c__api_8h_html_a31f639b22167ac51b67381b16dfd5634"><div class="ttname"><a href="c__api_8h.html#a31f639b22167ac51b67381b16dfd5634">MXKVStoreServerController</a></div><div class="ttdeci">void( MXKVStoreServerController)(int head, const char *body, void *controller_handle)</div><div class="ttdoc">the prototype of a server controller </div><div class="ttdef"><b>Definition:</b> c_api.h:1831</div></div>
 <div class="ttc" id="c__api_8h_html_a5dc95ef008b73103b86f4576fcf9117d"><div class="ttname"><a href="c__api_8h.html#a5dc95ef008b73103b86f4576fcf9117d">MXNDArraySave</a></div><div class="ttdeci">MXNET_DLL int MXNDArraySave(const char *fname, mx_uint num_args, NDArrayHandle *args, const char **keys)</div><div class="ttdoc">Save list of narray into the file. </div></div>
 <div class="ttc" id="c__api_8h_html_a27c9353672df26b68c169fbb857f3a3f"><div class="ttname"><a href="c__api_8h.html#a27c9353672df26b68c169fbb857f3a3f">MXNDArrayLoadFromRawBytes</a></div><div class="ttdeci">MXNET_DLL int MXNDArrayLoadFromRawBytes(const void *buf, size_t size, NDArrayHandle *out)</div><div class="ttdoc">create a NDArray handle that is loaded from raw bytes. </div></div>
 <div class="ttc" id="c__api_8h_html_a20f72cb4e18d5081eaf40875adcb6e98"><div class="ttname"><a href="c__api_8h.html#a20f72cb4e18d5081eaf40875adcb6e98">MXRtcPush</a></div><div class="ttdeci">MXNET_DLL int MXRtcPush(RtcHandle handle, mx_uint num_input, mx_uint num_output, NDArrayHandle *inputs, NDArrayHandle *outputs, mx_uint gridDimX, mx_uint gridDimY, mx_uint gridDimZ, mx_uint blockDimX, mx_uint blockDimY, mx_uint blockDimZ)</div><div class="ttdoc">Run cuda kernel. </div></div>
@@ -1180,7 +1186,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </div><!-- fragment --></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/c__predict__api_8h.html b/versions/master/doxygen/c__predict__api_8h.html
index c314b36..7c0fa2f 100644
--- a/versions/master/doxygen/c__predict__api_8h.html
+++ b/versions/master/doxygen/c__predict__api_8h.html
@@ -793,7 +793,7 @@ Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/c__predict__api_8h_source.html b/versions/master/doxygen/c__predict__api_8h_source.html
index 5644f92..33a5145 100644
--- a/versions/master/doxygen/c__predict__api_8h_source.html
+++ b/versions/master/doxygen/c__predict__api_8h_source.html
@@ -193,7 +193,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </div><!-- fragment --></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4-members.html b/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4-members.html
index 6cdc617..1a9d069 100644
--- a/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4-members.html
+++ b/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4-members.html
@@ -89,7 +89,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html b/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html
index 5a28d2e..2038d90 100644
--- a/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html
+++ b/versions/master/doxygen/classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html
@@ -227,7 +227,7 @@ Public Member Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classes.html b/versions/master/doxygen/classes.html
index 72cf498..f56c8ee 100644
--- a/versions/master/doxygen/classes.html
+++ b/versions/master/doxygen/classes.html
@@ -78,51 +78,52 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 <div class="qindex"><a class="qindex" href="#letter_A">A</a> | <a class="qindex" href="#letter_C">C</a> | <a class="qindex" href="#letter_D">D</a> | <a class="qindex" href="#letter_E">E</a> | <a class="qindex" href="#letter_F">F</a> | <a class="qindex" href="#letter_G">G</a> | <a class="qindex" href="#letter_H">H</a> | <a class="qindex" href="#letter_I">I</a> | <a class="qindex" href="#letter_K">K</a> | <a class="qindex" href="#letter_L">L</a> | <a class="qindex" href="#letter_M">M</a> | [...]
 <table align="center" border="0" cellpadding="0" cellspacing="0" style="margin: 10px; white-space: nowrap;" width="95%">
 <tr><td rowspan="2" valign="bottom"><a name="letter_A"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  A  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1EnvArguments.html">EnvArguments</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1KVStore.html">KVStore</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1Operator.html">Operator</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td rowspan="2" valign="bottom"><a name="letter [...]
-</td></tr>
-<tr><td valign="top"><a class="el" href="classmxnet_1_1Executor.html">Executor</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_L"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  L  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="classmxnet_1_1OperatorProperty.html">OperatorProperty</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td></tr>
+</td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1EnvArguments.html">EnvArguments</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1Input1.html">Input1</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1ObjectPoolAllocatable.html">ObjectPoolAllocatable</a> (<a class="el" href="namespacemxnet_1_1common.html">mxn [...]
+<tr><td valign="top"><a class="el" href="classmxnet_1_1Executor.html">Executor</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_K"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  K  </div></td></tr></table>
+</td><td valign="top"><a class="el" href="structmxnet_1_1OpContext.html">OpContext</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1StaticArray.html">StaticArray</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </td></tr>
 <tr><td valign="top"><a class="el" href="classmxnet_1_1Imperative_1_1AGInfo.html">Imperative::AGInfo</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_F"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  F  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1OperatorPropertyReg.html">OperatorPropertyReg</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1TBlob.html">TBlob</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td></tr>
-<tr><td valign="top"><a class="el" href="structmxnet_1_1rtc_1_1CudaModule_1_1ArgType.html">CudaModule::ArgType</a> (<a class="el" href="namespacemxnet_1_1rtc.html">mxnet::rtc</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1LazyAllocArray.html">LazyAllocArray</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1OpStatePtr.html">OpStatePtr</a> (<a class="el" href="namespacemxnet.html">m [...]
+</td><td valign="top"><a class="el" href="classmxnet_1_1Operator.html">Operator</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1Storage.html">Storage</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td></tr>
+<tr><td valign="top"><a class="el" href="structmxnet_1_1rtc_1_1CudaModule_1_1ArgType.html">CudaModule::ArgType</a> (<a class="el" href="namespacemxnet_1_1rtc.html">mxnet::rtc</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1rtc_1_1CudaModule_1_1Kernel.html">CudaModule::Kernel</a> (<a class="el" href="namespacemxnet_1_1rtc.html">mxnet::rtc</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1OperatorProperty.html">OperatorProperty</a> (<a class="el" href="namespace [...]
 </td></tr>
 <tr><td rowspan="2" valign="bottom"><a name="letter_C"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  C  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html">FieldEntry&lt; mxnet::TShape &gt;</a> (<a class="el" href="namespacedmlc_1_1parameter.html">dmlc::parameter</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_M"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  M  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1OutputGrad.html">OutputGrad</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td></tr>
+</td><td valign="top"><a class="el" href="classdmlc_1_1parameter_1_1FieldEntry_3_01mxnet_1_1TShape_01_4.html">FieldEntry&lt; mxnet::TShape &gt;</a> (<a class="el" href="namespacedmlc_1_1parameter.html">dmlc::parameter</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1KVStore.html">KVStore</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1OperatorPropertyReg.html">OperatorPropertyReg</a> (<a class="el" href="na [...]
 <tr><td rowspan="2" valign="bottom"><a name="letter_G"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  G  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1OutputValue.html">OutputValue</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf.html">UniqueIf</a> (<a class="el" href="namespacemxnet_1_1common_1_1helper.html">mxnet::common::helper</a>)   </td></tr>
-<tr><td valign="top"><a class="el" href="classmxnet_1_1Imperative_1_1CachedOp.html">Imperative::CachedOp</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structMXCallbackList.html">MXCallbackList</a>   </td><td rowspan="2" valign="bottom"><a name="letter_R"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  R  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf_3_01T[]_4.html">UniqueIf&lt; T[]&gt;</a> (<a class="el" href="namespacemxnet_1_1common_1_1helper.html">mxnet::common::helper</a>)   </td></tr>
-<tr><td valign="top"><a class="el" href="structmxnet_1_1CachedOpParam.html">CachedOpParam</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1GradFunctionArgument.html">GradFunctionArgument</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_N"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  N  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf_3_01T[kSize]_4.html">UniqueIf&lt; T[kSize]&gt;</a> (<a class="el" href="namespacemxnet_1_1common_1_1helper.html">mxnet::common::helper</a>)   </td></tr>
-<tr><td valign="top"><a class="el" href="classmxnet_1_1engine_1_1CallbackOnComplete.html">CallbackOnComplete</a> (<a class="el" href="namespacemxnet_1_1engine.html">mxnet::engine</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_H"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  H  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1Resource.html">Resource</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_c"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  c  </div></td></tr></table>
+</td><td rowspan="2" valign="bottom"><a name="letter_L"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  L  </div></td></tr></table>
+</td><td valign="top"><a class="el" href="classmxnet_1_1OpStatePtr.html">OpStatePtr</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1TBlob.html">TBlob</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td></tr>
+<tr><td valign="top"><a class="el" href="classmxnet_1_1Imperative_1_1CachedOp.html">Imperative::CachedOp</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1OutputGrad.html">OutputGrad</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_U"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  U  </div></td></tr></table>
 </td></tr>
-<tr><td valign="top"><a class="el" href="structmxnet_1_1Context.html">Context</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structNativeOpInfo.html">NativeOpInfo</a>   </td><td valign="top"><a class="el" href="classmxnet_1_1ResourceManager.html">ResourceManager</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td></tr>
-<tr><td valign="top"><a class="el" href="classmxnet_1_1rtc_1_1CudaModule.html">CudaModule</a> (<a class="el" href="namespacemxnet_1_1rtc.html">mxnet::rtc</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1Storage_1_1Handle.html">Storage::Handle</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1NDArray.html">NDArray</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" hr [...]
+<tr><td valign="top"><a class="el" href="structmxnet_1_1CachedOpParam.html">CachedOpParam</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1GradFunctionArgument.html">GradFunctionArgument</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1LazyAllocArray.html">LazyAllocArray</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet [...]
+<tr><td valign="top"><a class="el" href="classmxnet_1_1engine_1_1CallbackOnComplete.html">CallbackOnComplete</a> (<a class="el" href="namespacemxnet_1_1engine.html">mxnet::engine</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_H"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  H  </div></td></tr></table>
+</td><td rowspan="2" valign="bottom"><a name="letter_M"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  M  </div></td></tr></table>
+</td><td rowspan="2" valign="bottom"><a name="letter_R"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  R  </div></td></tr></table>
+</td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf.html">UniqueIf</a> (<a class="el" href="namespacemxnet_1_1common_1_1helper.html">mxnet::common::helper</a>)   </td></tr>
+<tr><td valign="top"><a class="el" href="structmxnet_1_1Context.html">Context</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1helper_1_1UniqueIf_3_01T[]_4.html">UniqueIf&lt; T[]&gt;</a> (<a class="el" href="namespacemxnet_1_1common_1_1helper.html">mxnet::common::helper</a>)   </td></tr>
+<tr><td valign="top"><a class="el" href="classmxnet_1_1rtc_1_1CudaModule.html">CudaModule</a> (<a class="el" href="namespacemxnet_1_1rtc.html">mxnet::rtc</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1Storage_1_1Handle.html">Storage::Handle</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structMXCallbackList.html">MXCallbackList</a>   </td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGener [...]
 <tr><td rowspan="2" valign="bottom"><a name="letter_D"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  D  </div></td></tr></table>
 </td><td rowspan="2" valign="bottom"><a name="letter_I"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  I  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1NDArrayFunctionReg.html">NDArrayFunctionReg</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1RunContext.html">RunContext</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1csr__indptr__check.html">csr_indptr_check</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </ [...]
-<tr><td valign="top"><a class="el" href="structNDArrayOpInfo.html">NDArrayOpInfo</a>   </td><td rowspan="2" valign="bottom"><a name="letter_S"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  S  </div></td></tr></table>
-</td><td rowspan="2" valign="bottom"><a name="letter_r"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  r  </div></td></tr></table>
+</td><td rowspan="2" valign="bottom"><a name="letter_N"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  N  </div></td></tr></table>
+</td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01cpu_00_01DType_01_4.html">RandGenerator&lt; cpu, DType &gt;</a> (<a class="el" href="namespacemxnet_1_1common_1_1random.html">mxnet::common::random</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_c"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  c  </div></td></tr></table>
 </td></tr>
-<tr><td valign="top"><a class="el" href="structmxnet_1_1DataBatch.html">DataBatch</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1IIterator.html">IIterator</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_O"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  O  </div></td></tr></table>
+<tr><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01double_01_4.html">RandGenerator&lt; gpu, double &gt;</a> (<a class="el" href="namespacemxnet_1_1common_1_1random.html">mxnet::common::random</a>)   </td></tr>
+<tr><td valign="top"><a class="el" href="structmxnet_1_1DataBatch.html">DataBatch</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1IIterator.html">IIterator</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structNativeOpInfo.html">NativeOpInfo</a>   </td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01DType_01_4.html">Rand [...]
+<tr><td valign="top"><a class="el" href="structmxnet_1_1DataInst.html">DataInst</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1Imperative.html">Imperative</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1NDArray.html">NDArray</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1Resource.html">R [...]
+<tr><td valign="top"><a class="el" href="structmxnet_1_1DataIteratorReg.html">DataIteratorReg</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01cpu_00_01DType_01_4_1_1Impl.html">RandGenerator&lt; cpu, DType &gt;::Impl</a> (<a class="el" href="namespacemxnet_1_1common_1_1random.html">mxnet::common::random</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1NDArrayFunctionReg. [...]
 </td></tr>
-<tr><td valign="top"><a class="el" href="structmxnet_1_1DataInst.html">DataInst</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1Imperative.html">Imperative</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1op_1_1SimpleOpRegEntry.html">SimpleOpRegEntry</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" hre [...]
-<tr><td valign="top"><a class="el" href="structmxnet_1_1DataIteratorReg.html">DataIteratorReg</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1Input0.html">Input0</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1ObjectPool.html">ObjectPool</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </td><td valign= [...]
 <tr><td rowspan="2" valign="bottom"><a name="letter_E"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  E  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1Input1.html">Input1</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1ObjectPoolAllocatable.html">ObjectPoolAllocatable</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1StaticArray.html">StaticArray</a> (<a class="el" href="namespacemxnet_1_1common [...]
-<tr><td rowspan="2" valign="bottom"><a name="letter_K"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  K  </div></td></tr></table>
-</td><td valign="top"><a class="el" href="structmxnet_1_1OpContext.html">OpContext</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="classmxnet_1_1Storage.html">Storage</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td></td></tr>
-<tr><td valign="top"><a class="el" href="classmxnet_1_1Engine.html">Engine</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td></td><td></td><td></td></tr>
-<tr><td></td><td valign="top"><a class="el" href="classmxnet_1_1rtc_1_1CudaModule_1_1Kernel.html">CudaModule::Kernel</a> (<a class="el" href="namespacemxnet_1_1rtc.html">mxnet::rtc</a>)   </td><td></td><td></td><td></td></tr>
+</td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01DType_01_4_1_1Impl.html">RandGenerator&lt; gpu, DType &gt;::Impl</a> (<a class="el" href="namespacemxnet_1_1common_1_1random.html">mxnet::common::random</a>)   </td><td valign="top"><a class="el" href="structNDArrayOpInfo.html">NDArrayOpInfo</a>   </td><td valign="top"><a class="el" href="structmxnet_1_1ResourceRequest.html">ResourceRequest</a> (<a class="el" href="namespacemxnet.html"> [...]
+<tr><td valign="top"><a class="el" href="classmxnet_1_1common_1_1random_1_1RandGenerator_3_01gpu_00_01double_01_4_1_1Impl.html">RandGenerator&lt; gpu, double &gt;::Impl</a> (<a class="el" href="namespacemxnet_1_1common_1_1random.html">mxnet::common::random</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_O"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  O  </div></td></tr></table>
+</td><td valign="top"><a class="el" href="structmxnet_1_1RunContext.html">RunContext</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1common_1_1rsp__idx__check.html">rsp_idx_check</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </td></tr>
+<tr><td valign="top"><a class="el" href="classmxnet_1_1Engine.html">Engine</a> (<a class="el" href="namespacemxnet.html">mxnet</a>)   </td><td valign="top"><a class="el" href="structmxnet_1_1op_1_1Input0.html">Input0</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td rowspan="2" valign="bottom"><a name="letter_S"></a><table border="0" cellpadding="0" cellspacing="0"><tr><td><div class="ah">  S  </div></td></tr></table>
+</td><td></td></tr>
+<tr><td></td><td></td><td valign="top"><a class="el" href="classmxnet_1_1common_1_1ObjectPool.html">ObjectPool</a> (<a class="el" href="namespacemxnet_1_1common.html">mxnet::common</a>)   </td><td></td></tr>
+<tr><td></td><td></td><td></td><td valign="top"><a class="el" href="classmxnet_1_1op_1_1SimpleOpRegEntry.html">SimpleOpRegEntry</a> (<a class="el" href="namespacemxnet_1_1op.html">mxnet::op</a>)   </td><td></td></tr>
 <tr><td></td><td></td><td></td><td></td><td></td></tr>
 </table>
 <div class="qindex"><a class="qindex" href="#letter_A">A</a> | <a class="qindex" href="#letter_C">C</a> | <a class="qindex" href="#letter_D">D</a> | <a class="qindex" href="#letter_E">E</a> | <a class="qindex" href="#letter_F">F</a> | <a class="qindex" href="#letter_G">G</a> | <a class="qindex" href="#letter_H">H</a> | <a class="qindex" href="#letter_I">I</a> | <a class="qindex" href="#letter_K">K</a> | <a class="qindex" href="#letter_L">L</a> | <a class="qindex" href="#letter_M">M</a> | [...]
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:18 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Engine-members.html b/versions/master/doxygen/classmxnet_1_1Engine-members.html
index 494a0ea..ddd9902 100644
--- a/versions/master/doxygen/classmxnet_1_1Engine-members.html
+++ b/versions/master/doxygen/classmxnet_1_1Engine-members.html
@@ -108,7 +108,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Engine.html b/versions/master/doxygen/classmxnet_1_1Engine.html
index 27c0918..0e9ec51 100644
--- a/versions/master/doxygen/classmxnet_1_1Engine.html
+++ b/versions/master/doxygen/classmxnet_1_1Engine.html
@@ -941,7 +941,7 @@ Static Public Member Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Executor-members.html b/versions/master/doxygen/classmxnet_1_1Executor-members.html
index cde7495..1245239 100644
--- a/versions/master/doxygen/classmxnet_1_1Executor-members.html
+++ b/versions/master/doxygen/classmxnet_1_1Executor-members.html
@@ -97,7 +97,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Executor.html b/versions/master/doxygen/classmxnet_1_1Executor.html
index 6c170cf..4b0f991 100644
--- a/versions/master/doxygen/classmxnet_1_1Executor.html
+++ b/versions/master/doxygen/classmxnet_1_1Executor.html
@@ -659,7 +659,7 @@ Static Public Member Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1IIterator-members.html b/versions/master/doxygen/classmxnet_1_1IIterator-members.html
index ad5ca1d..3976162 100644
--- a/versions/master/doxygen/classmxnet_1_1IIterator-members.html
+++ b/versions/master/doxygen/classmxnet_1_1IIterator-members.html
@@ -91,7 +91,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1IIterator.html b/versions/master/doxygen/classmxnet_1_1IIterator.html
index 4cf885f..488b583 100644
--- a/versions/master/doxygen/classmxnet_1_1IIterator.html
+++ b/versions/master/doxygen/classmxnet_1_1IIterator.html
@@ -323,7 +323,7 @@ template&lt;typename DType &gt; </div>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Imperative-members.html b/versions/master/doxygen/classmxnet_1_1Imperative-members.html
index 61812d0..feb9592 100644
--- a/versions/master/doxygen/classmxnet_1_1Imperative-members.html
+++ b/versions/master/doxygen/classmxnet_1_1Imperative-members.html
@@ -95,7 +95,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Imperative.html b/versions/master/doxygen/classmxnet_1_1Imperative.html
index ca3321a..83d6008 100644
--- a/versions/master/doxygen/classmxnet_1_1Imperative.html
+++ b/versions/master/doxygen/classmxnet_1_1Imperative.html
@@ -511,7 +511,7 @@ Friends</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo-members.html b/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo-members.html
index 8e4fd64..ca56591 100644
--- a/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo-members.html
+++ b/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo-members.html
@@ -96,7 +96,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo.html b/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo.html
index 37cc4bd..2a38da3 100644
--- a/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo.html
+++ b/versions/master/doxygen/classmxnet_1_1Imperative_1_1AGInfo.html
@@ -338,7 +338,7 @@ Public Attributes</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp-members.html b/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp-members.html
index 2db0c2d..fe97f90 100644
--- a/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp-members.html
+++ b/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp-members.html
@@ -96,7 +96,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp.html b/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp.html
index 989ad05..0243b6d 100644
--- a/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp.html
+++ b/versions/master/doxygen/classmxnet_1_1Imperative_1_1CachedOp.html
@@ -440,7 +440,7 @@ Public Member Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1KVStore-members.html b/versions/master/doxygen/classmxnet_1_1KVStore-members.html
index ed69ebf..d2f9ca0 100644
--- a/versions/master/doxygen/classmxnet_1_1KVStore-members.html
+++ b/versions/master/doxygen/classmxnet_1_1KVStore-members.html
@@ -117,7 +117,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1KVStore.html b/versions/master/doxygen/classmxnet_1_1KVStore.html
index ebde651..8412dd2 100644
--- a/versions/master/doxygen/classmxnet_1_1KVStore.html
+++ b/versions/master/doxygen/classmxnet_1_1KVStore.html
@@ -1238,7 +1238,7 @@ Protected Attributes</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1NDArray-members.html b/versions/master/doxygen/classmxnet_1_1NDArray-members.html
index bffa68b..0b1c02d 100644
--- a/versions/master/doxygen/classmxnet_1_1NDArray-members.html
+++ b/versions/master/doxygen/classmxnet_1_1NDArray-members.html
@@ -144,7 +144,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1NDArray.html b/versions/master/doxygen/classmxnet_1_1NDArray.html
index d72a8dc..6436029 100644
--- a/versions/master/doxygen/classmxnet_1_1NDArray.html
+++ b/versions/master/doxygen/classmxnet_1_1NDArray.html
@@ -141,6 +141,7 @@ Public Member Functions</h2></td></tr>
 <tr class="memitem:a32ed2be4ecf94c58b19bd43b48f2b1fa"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="classmxnet_1_1NDArray.html#a32ed2be4ecf94c58b19bd43b48f2b1fa">set_fresh_out_grad</a> (bool state) const </td></tr>
 <tr class="separator:a32ed2be4ecf94c58b19bd43b48f2b1fa"><td class="memSeparator" colspan="2"> </td></tr>
 <tr class="memitem:a0f59c72c8caefcc069d5839d35288cd1"><td align="right" class="memItemLeft" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="classmxnet_1_1NDArray.html#a0f59c72c8caefcc069d5839d35288cd1">storage_initialized</a> () const </td></tr>
+<tr class="memdesc:a0f59c72c8caefcc069d5839d35288cd1"><td class="mdescLeft"> </td><td class="mdescRight">Returns true if a sparse ndarray's aux_data and storage are initialized Throws an exception if the indices array shape is inconsistent Returns false if the indices array is empty(nnz = 0) for csr/row_sparse.  <a href="#a0f59c72c8caefcc069d5839d35288cd1">More...</a><br/></td></tr>
 <tr class="separator:a0f59c72c8caefcc069d5839d35288cd1"><td class="memSeparator" colspan="2"> </td></tr>
 <tr class="memitem:ad330f5e35c41078cb64cd84ae59a0bc3"><td align="right" class="memItemLeft" valign="top"><a class="el" href="structmxnet_1_1Storage_1_1Handle.html">Storage::Handle</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="classmxnet_1_1NDArray.html#ad330f5e35c41078cb64cd84ae59a0bc3">storage_handle</a> () const </td></tr>
 <tr class="memdesc:ad330f5e35c41078cb64cd84ae59a0bc3"><td class="mdescLeft"> </td><td class="mdescRight">get storage handle  <a href="#ad330f5e35c41078cb64cd84ae59a0bc3">More...</a><br/></td></tr>
@@ -1529,6 +1530,7 @@ Friends</h2></td></tr>
 </tr>
 </table>
 </div><div class="memdoc">
+<p>Returns true if a sparse ndarray's aux_data and storage are initialized. Throws an exception if the indices array shape is inconsistent. Returns false if the indices array is empty (nnz = 0) for csr/row_sparse. </p>
 </div>
 </div>
 <a class="anchor" id="a4d8f894c0306ad693ccf29278a613732"></a>
@@ -1801,7 +1803,7 @@ Friends</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
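
The hunk above adds documentation for mxnet::NDArray::storage_initialized(). The following minimal sketch (not part of the generated docs or of this diff) illustrates the documented behaviour from the C++ API; it assumes the headers shipped with this build (include/mxnet/ndarray.h), linking against libmxnet, and the sparse-storage NDArray constructor taking (storage type, shape, context).

    #include <mxnet/ndarray.h>
    #include <iostream>

    int main() {
      // Row-sparse NDArray created with delayed allocation: its indices
      // (aux_data) are still empty, i.e. nnz = 0, so storage_initialized()
      // is documented to return false until values are actually written.
      mxnet::NDArray rsp(mxnet::kRowSparseStorage,
                         mxnet::TShape({10, 4}),
                         mxnet::Context::CPU());
      std::cout << std::boolalpha
                << rsp.storage_initialized()   // expected: false (nnz = 0)
                << std::endl;
      return 0;
    }

Once the row_sparse array is populated (its aux_data holds at least one row index), the same call is expected to return true.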
diff --git a/versions/master/doxygen/classmxnet_1_1OpStatePtr-members.html b/versions/master/doxygen/classmxnet_1_1OpStatePtr-members.html
index 71a469e..2388bbb 100644
--- a/versions/master/doxygen/classmxnet_1_1OpStatePtr-members.html
+++ b/versions/master/doxygen/classmxnet_1_1OpStatePtr-members.html
@@ -89,7 +89,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1OpStatePtr.html b/versions/master/doxygen/classmxnet_1_1OpStatePtr.html
index 9024235..b4e8efc 100644
--- a/versions/master/doxygen/classmxnet_1_1OpStatePtr.html
+++ b/versions/master/doxygen/classmxnet_1_1OpStatePtr.html
@@ -237,7 +237,7 @@ template&lt;typename T &gt; </div>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Operator-members.html b/versions/master/doxygen/classmxnet_1_1Operator-members.html
index 16e8c08..37d625d 100644
--- a/versions/master/doxygen/classmxnet_1_1Operator-members.html
+++ b/versions/master/doxygen/classmxnet_1_1Operator-members.html
@@ -88,7 +88,7 @@ var searchBox = new SearchBox("searchBox", "search",false,'Search');
 </table></div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1Operator.html b/versions/master/doxygen/classmxnet_1_1Operator.html
index 788befe..548bccf 100644
--- a/versions/master/doxygen/classmxnet_1_1Operator.html
+++ b/versions/master/doxygen/classmxnet_1_1Operator.html
@@ -307,7 +307,7 @@ Public Member Functions</h2></td></tr>
 </div><!-- contents -->
 <!-- start footer part -->
 <hr class="footer"/><address class="footer"><small>
-Generated on Wed Dec 6 2017 07:08:52 for mxnet by  <a href="http://www.doxygen.org/index.html">
+Generated on Thu Jan 4 2018 07:03:16 for mxnet by  <a href="http://www.doxygen.org/index.html">
 <img alt="doxygen" class="footer" src="doxygen.png"/>
 </a> 1.8.6
 </small></address>
diff --git a/versions/master/doxygen/classmxnet_1_1OperatorProperty-members.html b/versions/master/doxygen/classmxnet_1_1OperatorProperty-members.html
... 14391 lines suppressed ...

-- 
To stop receiving notification emails like this one, please contact
['"commits@mxnet.apache.org" <co...@mxnet.apache.org>'].