Posted to commits@singa.apache.org by wa...@apache.org on 2016/06/03 07:48:23 UTC

[18/60] incubator-singa git commit: SINGA-163 - Reorganize the project folder layout

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rbm/autoencoder.conf
----------------------------------------------------------------------
diff --git a/examples/rbm/autoencoder.conf b/examples/rbm/autoencoder.conf
deleted file mode 100644
index 223ad0d..0000000
--- a/examples/rbm/autoencoder.conf
+++ /dev/null
@@ -1,229 +0,0 @@
-name: "auto-encoder"
-train_steps: 12200
-test_steps: 100
-test_freq: 1000
-disp_freq: 100
-checkpoint_path: "examples/rbm/rbm4/checkpoint/step6000-worker0"
-checkpoint_path: "examples/rbm/rbm3/checkpoint/step6000-worker0"
-checkpoint_path: "examples/rbm/rbm2/checkpoint/step6000-worker0"
-checkpoint_path: "examples/rbm/rbm1/checkpoint/step6000-worker0"
-train_one_batch{
-  alg: kBP
-}
-updater{
-  type: kAdaGrad
-  learning_rate{
-    base_lr: 0.01
-    type: kFixed
-  }
-}
-
-neuralnet {
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/train_data.bin"
-      batchsize: 100
-      std_value: 255
-      shape: 784
-    }
-    include: kTrain
-  }
-
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/test_data.bin"
-      std_value: 255
-      batchsize: 100
-      shape: 784
-    }
-    include: kTest
-  }
-
-
-  layer{
-    name: "Inner1"
-    type: kInnerProduct
-    srclayers:"data"
-    innerproduct_conf{
-      num_output: 1000
-    }
-    param{
-      name: "w1"
-    }
-    param{
-      name: "b12"
-    }
-  }
-
-  layer{
-    name: "Sigmoid1"
-    type: kSigmoid
-    srclayers:"Inner1"
-  }
-  layer{
-    name: "Inner2"
-    type: kInnerProduct
-    srclayers:"Sigmoid1"
-    innerproduct_conf{
-      num_output: 500
-    }
-    param{
-      name: "w2"
-    }
-    param{
-      name: "b22"
-    }
-  }
-
-  layer{
-    name: "Sigmoid2"
-    type: kSigmoid
-    srclayers:"Inner2"
-  }
-
-  layer{
-    name: "Inner3"
-    type:  kInnerProduct
-    srclayers:"Sigmoid2"
-    innerproduct_conf{
-      num_output: 250
-    }
-    param{
-      name: "w3"
-    }
-    param{
-      name: "b32"
-    }
-  }
-
-  layer{
-    name: "Sigmoid3"
-    type: kSigmoid
-    srclayers:"Inner3"
-  }
-
-  layer{
-    name: "Inner4"
-    type: kInnerProduct
-    srclayers:"Sigmoid3"
-    innerproduct_conf{
-      num_output: 30
-    }
-    param{
-      name: "w4"
-    }
-    param{
-      name: "b42"
-    }
-  }
-
-  layer{
-    name: "Inner5"
-    type: kInnerProduct
-    #srclayers:"Sigmoid4"
-    srclayers:"Inner4"
-    innerproduct_conf{
-      num_output: 250
-      transpose: true
-    }
-    param{
-      name: "w5"
-      share_from: "w4"
-    }
-    param{
-      name: "b41"
-    }
-  }
-
-  layer{
-    name: "Sigmoid5"
-    type: kSigmoid
-    srclayers:"Inner5"
-  }
-  layer{
-    name: "Inner6"
-    type: kInnerProduct
-    srclayers:"Sigmoid5"
-    innerproduct_conf{
-      num_output: 500
-      transpose: true
-    }
-    param{
-      name: "w6"
-      share_from: "w3"
-    }
-    param{
-      name: "b31"
-    }
-  }
-
-  layer{
-    name: "Sigmoid6"
-    type: kSigmoid
-    srclayers:"Inner6"
-  }
-  layer{
-    name: "Inner7"
-    type: kInnerProduct
-    srclayers:"Sigmoid6"
-    innerproduct_conf{
-      num_output: 1000
-      transpose: true
-    }
-    param{
-      name: "w7"
-      share_from: "w2"
-    }
-    param{
-      name: "b21"
-    }
-
-  }
-
-  layer{
-    name: "Sigmoid7"
-    type: kSigmoid
-    srclayers:"Inner7"
-  }
-  layer{
-    name: "Inner8"
-    type: kInnerProduct
-    srclayers:"Sigmoid7"
-    innerproduct_conf{
-      num_output: 784
-      transpose: true
-    }
-    param{
-      name: "w8"
-      share_from: "w1"
-    }
-    param{
-      name: "b11"
-    }
-  }
-
-  layer{
-    name: "Sigmoid8"
-    type: kSigmoid
-    srclayers:"Inner8"
-  }
-
-  layer{
-    name: "loss"
-    type:kEuclideanLoss
-    srclayers:"Sigmoid8"
-    srclayers:"data"
-  }
-}
-cluster {
-  nworker_groups: 1
-  nserver_groups: 1
-  workspace: "examples/rbm/autoencoder/"
-}
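
A note on the deleted autoencoder.conf: the decoder layers (Inner5 to Inner8)
reuse the encoder weights via share_from together with transpose: true, so w5
is w4 read as its transpose, w6 is w3 transposed, and so on. The
784-1000-500-250-30 encoder and its mirrored decoder therefore store only four
weight matrices plus biases. The sketch below (illustrative C++, not SINGA
code; the names are made up) shows what that weight tying means numerically:

    #include <cmath>
    #include <vector>

    // y = sigmoid(x*W + b). With transpose == true the same storage for W is
    // read as its transpose, which is how a decoder layer reuses an encoder
    // matrix (e.g. Inner5 sharing w4) without keeping a second copy.
    std::vector<float> Dense(const std::vector<float>& x,
                             const std::vector<float>& W,
                             const std::vector<float>& b,
                             int in, int out, bool transpose) {
      std::vector<float> y(out);
      for (int j = 0; j < out; ++j) {
        float s = b[j];
        for (int i = 0; i < in; ++i)
          s += x[i] * (transpose ? W[j * in + i]     // W stored as (out x in)
                                 : W[i * out + j]);  // W stored as (in x out)
        y[j] = 1.f / (1.f + std::exp(-s));
      }
      return y;
    }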

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rbm/rbm1.conf
----------------------------------------------------------------------
diff --git a/examples/rbm/rbm1.conf b/examples/rbm/rbm1.conf
deleted file mode 100644
index 696a8cb..0000000
--- a/examples/rbm/rbm1.conf
+++ /dev/null
@@ -1,101 +0,0 @@
-name: "rbm1"
-train_steps: 6000
-test_steps: 100
-test_freq: 500
-disp_freq: 100
-train_one_batch{
-  alg: kCD
-}
-updater{
-  type: kSGD
-  momentum: 0.8
-  weight_decay: 0.0002
-  learning_rate{
-    base_lr: 0.1
-    type: kFixed
-  }
-}
-
-neuralnet {
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/train_data.bin"
-      batchsize: 100
-      std_value: 255
-      shape: 784
-    }
-    include: kTrain
-  }
-
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/test_data.bin"
-      std_value: 255
-      batchsize: 100
-      shape: 784
-    }
-    include: kTest
-  }
-
-layer{
-  name: "RBMVis"
-  type: kRBMVis
-  srclayers:"data"
-  srclayers:"RBMHid"
-  rbm_conf{
-    hdim: 1000
-  }
-  param{
-    name: "w1"
-    init{
-      type: kGaussian
-      mean: 0.0
-      std: 0.1
-    }
-  }
-
-  param{
-    name: "b11"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-
-layer{
-  name: "RBMHid"
-  type: kRBMHid
-  srclayers:"RBMVis"
-  rbm_conf{
-    hdim: 1000
-  }
-  param{
-    name: "w1_"
-    share_from: "w1"
-  }
-
-  param{
-    name: "b12"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-}
-cluster {
-  nworker_groups: 1
-  nserver_groups: 1
-  nservers_per_group: 1
-  nworkers_per_group: 1
-  workspace: "examples/rbm/rbm1/"
-}
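
For reference, alg: kCD in rbm1.conf selects contrastive divergence for the
784-1000 RBM. A minimal CD-1 update for a binary RBM looks roughly like the
sketch below (illustrative only; biases, momentum and the weight decay from
the updater block are omitted, and this is not SINGA's kCD implementation):

    #include <cmath>
    #include <cstdlib>
    #include <vector>

    static float Sigmoid(float x) { return 1.f / (1.f + std::exp(-x)); }
    static float Rand01() { return static_cast<float>(std::rand()) / RAND_MAX; }

    // One CD-1 step for a binary RBM with V visible and H hidden units.
    // W is V x H, row-major; lr corresponds to base_lr in the conf above.
    void CD1Update(std::vector<float>& W, const std::vector<float>& v0,
                   int V, int H, float lr) {
      std::vector<float> h0(H), v1(V), h1(H);
      for (int j = 0; j < H; ++j) {       // positive phase: sample h0 ~ p(h|v0)
        float s = 0.f;
        for (int i = 0; i < V; ++i) s += v0[i] * W[i * H + j];
        h0[j] = Sigmoid(s) > Rand01() ? 1.f : 0.f;
      }
      for (int i = 0; i < V; ++i) {       // reconstruction: v1 = p(v|h0)
        float s = 0.f;
        for (int j = 0; j < H; ++j) s += h0[j] * W[i * H + j];
        v1[i] = Sigmoid(s);
      }
      for (int j = 0; j < H; ++j) {       // negative phase: h1 = p(h|v1)
        float s = 0.f;
        for (int i = 0; i < V; ++i) s += v1[i] * W[i * H + j];
        h1[j] = Sigmoid(s);
      }
      for (int i = 0; i < V; ++i)         // dW = lr * (v0 h0^T - v1 h1^T)
        for (int j = 0; j < H; ++j)
          W[i * H + j] += lr * (v0[i] * h0[j] - v1[i] * h1[j]);
    }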

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rbm/rbm2.conf
----------------------------------------------------------------------
diff --git a/examples/rbm/rbm2.conf b/examples/rbm/rbm2.conf
deleted file mode 100644
index ddb9681..0000000
--- a/examples/rbm/rbm2.conf
+++ /dev/null
@@ -1,122 +0,0 @@
-name: "rbm2"
-train_steps: 6000
-test_steps: 100
-test_freq: 500
-disp_freq: 100
-train_one_batch{
-  alg: kCD
-}
-checkpoint_path: "examples/rbm/rbm1/checkpoint/step6000-worker0"
-updater{
-  type: kSGD
-  momentum: 0.8
-  weight_decay: 0.0002
-  learning_rate{
-    base_lr: 0.1
-    type: kFixed
-  }
-}
-
-neuralnet {
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/train_data.bin"
-      batchsize: 100
-      std_value: 255
-      shape: 784
-    }
-    include: kTrain
-  }
-
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/test_data.bin"
-      std_value: 255
-      batchsize: 100
-      shape: 784
-    }
-    include: kTest
-  }
-
-layer{
-  name: "Inner1"
-  type: kInnerProduct
-  srclayers:"data"
-  innerproduct_conf{
-    num_output: 1000
-  }
-  param{
-    name: "w1"
-  }
-  param{
-    name: "b12"
-  }
-}
-
-layer{
-  name: "Sigmoid1"
-  type: kSigmoid
-  srclayers:"Inner1"
-}
-
-layer{
-  name: "RBMVis"
-  type: kRBMVis
-  srclayers:"Sigmoid1"
-  srclayers:"RBMHid"
-  rbm_conf{
-    hdim: 500
-  }
-  param{
-    name: "w2"
-    init{
-      type: kGaussian
-      mean: 0.0
-      std: 0.1
-    }
-  }
-
-  param{
-    name: "b21"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-
-layer{
-  name: "RBMHid"
-  type: kRBMHid
-  srclayers:"RBMVis"
-  rbm_conf{
-    hdim: 500
-  }
-  param{
-    name: "w2_"
-    share_from: "w2"
-  }
-  param{
-    name: "b22"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-}
-cluster {
-  nworker_groups: 1
-  nserver_groups: 1
-  nservers_per_group: 1
-  nworkers_per_group: 1
-  workspace: "examples/rbm/rbm2/"
-}

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rbm/rbm3.conf
----------------------------------------------------------------------
diff --git a/examples/rbm/rbm3.conf b/examples/rbm/rbm3.conf
deleted file mode 100644
index 44eae77..0000000
--- a/examples/rbm/rbm3.conf
+++ /dev/null
@@ -1,147 +0,0 @@
-name: "rbm3"
-train_steps: 6000
-test_steps: 100
-test_freq: 500
-disp_freq: 100
-train_one_batch{
-  alg: kCD
-}
-checkpoint_path: "examples/rbm/rbm2/checkpoint/step6000-worker0"
-checkpoint_path: "examples/rbm/rbm1/checkpoint/step6000-worker0"
-
-updater{
-  type: kSGD
-  momentum: 0.8
-  weight_decay: 0.0002
-  learning_rate{
-    base_lr: 0.1
-    type: kFixed
-  }
-}
-
-
-neuralnet {
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/train_data.bin"
-      batchsize: 100
-      std_value: 255
-      shape: 784
-    }
-    include: kTrain
-  }
-
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/test_data.bin"
-      std_value: 255
-      batchsize: 100
-      shape: 784
-    }
-    include: kTest
-  }
-
-
-layer{
-    name: "Inner1"
-    type: kInnerProduct
-    srclayers:"data"
-    innerproduct_conf{
-      num_output: 1000
-    }
-    param{
-      name: "w1"
-    }
-    param{
-      name: "b12"
-    }
-  }
-
-  layer{
-    name: "Sigmoid1"
-    type: kSigmoid
-    srclayers:"Inner1"
-  }
-
-layer{
-    name: "Inner2"
-    type: kInnerProduct
-    srclayers:"Sigmoid1"
-    innerproduct_conf{
-      num_output: 500
-    }
-    param{
-      name: "w2"
-    }
-    param{
-      name: "b22"
-    }
-  }
-
-  layer{
-    name: "Sigmoid2"
-    type: kSigmoid
-    srclayers:"Inner2"
-  }
-layer{
-  name: "RBMVis"
-  type: kRBMVis
-  srclayers:"Sigmoid2"
-  srclayers:"RBMHid"
-  rbm_conf{
-    hdim: 250
-  }
-  param{
-    name: "w3"
-    init{
-      type: kGaussian
-      mean: 0.0
-      std: 0.1
-    }
-  }
-
-  param{
-    name: "b31"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-
-layer{
-  name: "RBMHid"
-  type: kRBMHid
-  srclayers:"RBMVis"
-  rbm_conf{
-    hdim: 250
-  }
-  param{
-    name: "w3_"
-    share_from: "w3"
-  }
-
-  param{
-    name: "b32"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-}
-cluster {
-  nworker_groups: 1
-  nserver_groups: 1
-  nservers_per_group: 1
-  nworkers_per_group: 1
-  workspace: "examples/rbm/rbm3/"
-}

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rbm/rbm4.conf
----------------------------------------------------------------------
diff --git a/examples/rbm/rbm4.conf b/examples/rbm/rbm4.conf
deleted file mode 100644
index bb023c4..0000000
--- a/examples/rbm/rbm4.conf
+++ /dev/null
@@ -1,167 +0,0 @@
-name: "rbm4"
-train_steps: 6000
-test_steps: 100
-test_freq: 500
-disp_freq: 100
-train_one_batch{
-  alg: kCD
-}
-checkpoint_path: "examples/rbm/rbm3/checkpoint/step6000-worker0"
-checkpoint_path: "examples/rbm/rbm2/checkpoint/step6000-worker0"
-checkpoint_path: "examples/rbm/rbm1/checkpoint/step6000-worker0"
-updater{
-    type: kSGD
-    momentum: 0.8
-    weight_decay: 0.0002
-    learning_rate{
-      base_lr: 0.001
-      type: kFixed
-    }
-}
-
-neuralnet {
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/train_data.bin"
-      batchsize: 100
-      std_value: 255
-      shape: 784
-    }
-    include: kTrain
-  }
-
-  layer {
-    name: "data"
-    type: kRecordInput
-    store_conf {
-      backend: "kvfile"
-      path: "examples/mnist/test_data.bin"
-      std_value: 255
-      batchsize: 100
-      shape: 784
-    }
-    include: kTest
-  }
-
-
-  layer{
-    name: "Inner1"
-    type: kInnerProduct
-    srclayers:"data"
-    innerproduct_conf{
-      num_output: 1000
-    }
-    param{
-      name: "w1"
-    }
-    param{
-      name: "b12"
-    }
-  }
-
-  layer{
-    name: "Sigmoid1"
-    type: kSigmoid
-    srclayers:"Inner1"
-  }
-
-layer{
-    name: "Inner2"
-    type: kInnerProduct
-    srclayers:"Sigmoid1"
-    innerproduct_conf{
-      num_output: 500
-    }
-    param{
-      name: "w2"
-    }
-    param{
-      name: "b22"
-    }
-  }
-
-  layer{
-    name: "Sigmoid2"
-    type: kSigmoid
-    srclayers:"Inner2"
-  }
-
-layer{
-    name: "Inner3"
-    type: kInnerProduct
-    srclayers:"Sigmoid2"
-    innerproduct_conf{
-      num_output: 250
-    }
-    param{
-      name: "w3"
-    }
-    param{
-      name: "b32"
-    }
-  }
-
-  layer{
-    name: "Sigmoid3"
-    type: kSigmoid
-    srclayers:"Inner3"
-  }
-
-layer{
-  name: "RBMVis"
-  type: kRBMVis
-  srclayers:"Sigmoid3"
-  srclayers:"RBMHid"
-  rbm_conf{
-    hdim: 30
-  }
-  param{
-    name: "w4"
-    init{
-      type: kGaussian
-      mean: 0.0
-      std: 0.1
-    }
-  }
-  param{
-    name: "b41"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-
-layer{
-  name: "RBMHid"
-  type: kRBMHid
-  srclayers:"RBMVis"
-  rbm_conf{
-    hdim: 30
-    gaussian: true
-  }
-  param{
-    name: "w4_"
-    share_from: "w4"
-  }
-  param{
-    name: "b42"
-    wd_scale: 0
-    init{
-      type: kConstant
-      value: 0.0
-    }
-  }
-}
-}
-cluster {
-  nworker_groups: 1
-  nserver_groups: 1
-  nservers_per_group: 1
-  nworkers_per_group: 1
-  workspace: "examples/rbm/rbm4/"
-}
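
rbm4.conf differs from the three RBMs before it in two ways: a smaller
learning rate (0.001) and gaussian: true on the 30-unit hidden layer, which
makes the top code layer linear with additive unit-variance noise instead of
stochastic binary, as in Hinton and Salakhutdinov's deep autoencoder. Roughly,
the hidden sampling step changes as in this sketch (illustrative, not SINGA's
kRBMHid code):

    #include <cmath>
    #include <random>

    // Hidden activation for one unit given its pre-activation s.
    // Binary RBM: h ~ Bernoulli(sigmoid(s)).
    // Gaussian hidden units (rbm_conf { gaussian: true }): h = s + N(0, 1).
    float SampleHidden(float s, bool gaussian, std::mt19937& rng) {
      if (gaussian) {
        std::normal_distribution<float> noise(0.f, 1.f);
        return s + noise(rng);
      }
      std::bernoulli_distribution bern(1.f / (1.f + std::exp(-s)));
      return bern(rng) ? 1.f : 0.f;
    }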

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/Makefile.example
----------------------------------------------------------------------
diff --git a/examples/rnnlm/Makefile.example b/examples/rnnlm/Makefile.example
deleted file mode 100644
index 13c5e42..0000000
--- a/examples/rnnlm/Makefile.example
+++ /dev/null
@@ -1,52 +0,0 @@
-#/************************************************************
-#*
-#* Licensed to the Apache Software Foundation (ASF) under one
-#* or more contributor license agreements.  See the NOTICE file
-#* distributed with this work for additional information
-#* regarding copyright ownership.  The ASF licenses this file
-#* to you under the Apache License, Version 2.0 (the
-#* "License"); you may not use this file except in compliance
-#* with the License.  You may obtain a copy of the License at
-#*
-#*   http://www.apache.org/licenses/LICENSE-2.0
-#*
-#* Unless required by applicable law or agreed to in writing,
-#* software distributed under the License is distributed on an
-#* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#* KIND, either express or implied.  See the License for the
-#* specific language governing permissions and limitations
-#* under the License.
-#*
-#*************************************************************/
-
-MSHADOW_FLAGS :=-DMSHADOW_USE_CUDA=0 -DMSHADOW_USE_CBLAS=1 -DMSHADOW_USE_MKL=0
-
-libs :=singa glog protobuf
-filename = rnnlm-0.4b.tgz
-# note: filelink for rnnlm-0.4b may change
-filelink = https://f25ea9ccb7d3346ce6891573d543960492b92c30.googledrive.com/host/0ByxdPXuxLPS5RFM5dVNvWVhTd0U
-dirname = $(patsubst %.tgz,%, $(filename))
-numclass = 100
-dirshards = train_shard valid_shard test_shard
-
-
-
-download:
-	wget $(filelink)/$(filename)
-	tar zxf $(filename)
-	rm $(filename)
-
-create:
-	protoc --proto_path=../../src/proto --proto_path=. --cpp_out=. rnnlm.proto
-	$(CXX) create_data.cc rnnlm.pb.cc -std=c++11 -lsinga -lprotobuf -lzookeeper_mt -lglog -I../../include -I../../include/singa/proto \
-		-L../../.libs/ -L/usr/local/lib -Wl,-unresolved-symbols=ignore-in-shared-libs -Wl,-rpath=../../.libs/ \
-		-o create_data.bin
-	for d in $(dirshards); do mkdir -p $${d}; done
-	./create_data.bin -train $(dirname)/train -test $(dirname)/test -valid $(dirname)/valid -class_size $(numclass)
-
-
-rnnlm:
-	protoc --proto_path=../../src/proto --proto_path=. --cpp_out=. rnnlm.proto
-	$(CXX) main.cc rnnlm.cc rnnlm.pb.cc $(MSHADOW_FLAGS) -msse3 -std=c++11 -lsinga -lglog -lprotobuf -lopenblas -I../../include -I../../include/singa/proto \
-		-L../../.libs/ -L/usr/local  -Wl,-unresolved-symbols=ignore-in-shared-libs -Wl,-rpath=../../.libs/\
-		-o rnnlm.bin

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/README.md
----------------------------------------------------------------------
diff --git a/examples/rnnlm/README.md b/examples/rnnlm/README.md
deleted file mode 100644
index 9e83686..0000000
--- a/examples/rnnlm/README.md
+++ /dev/null
@@ -1,52 +0,0 @@
-This example trains the [RNN model](http://www.fit.vutbr.cz/research/groups/speech/publi/2010/mikolov_interspeech2010_IS100722.pdf) proposed by Tomas Mikolov for [language modeling](https://en.wikipedia.org/wiki/Language_model) over a text dataset containing 71350 words, provided at [RNNLM Toolkit](https://f25ea9ccb7d3346ce6891573d543960492b92c30.googledrive.com/host/0ByxdPXuxLPS5RFM5dVNvWVhTd0U).
-The training objective (loss) is to minimize the [perplexity per word](https://en.wikipedia.org/wiki/Perplexity), which is equivalent to maximizing the probability of predicting the next word given the current word in a sentence.
-The purpose of this example is to show users how to implement and use their own RNN layers in SINGA.
-The example RNN model consists of six layers, namely RnnDataLayer, WordLayer, RnnLabelLayer, EmbeddingLayer, HiddenLayer, and OutputLayer.
-
-## File description
-
-The files in this folder include:
-
-* rnnlm.proto, definition of the configuration protocol of the layers.
-* rnnlm.h, declaration of the layers.
-* rnnlm.cc, definition of the layers.
-* main.cc, main function that registers the layers.
-* Makefile.example, Makefile for compiling all source code in this folder.
-* job.conf, the job configuration for training the RNN language model.
-
-
-## Data preparation
-
-To use the RNNLM dataset, we can download it and create DataShard by typing
-
-    # in rnnlm/ folder
-    cp Makefile.example Makefile
-    make download
-    make create
-
-## Compilation
-
-The *Makefile.example* contains instructions for compiling the source code.
-
-    # in rnnlm/ folder
-    cp Makefile.example Makefile
-    make rnnlm
-
-It will generate an executable file *rnnlm.bin*.
-
-## Running
-
-Make sure the example job configuration file, named *job.conf*, is in place.
-
-Before running SINGA, we need to export `LD_LIBRARY_PATH` to
-include the folder containing libsinga.so, as follows.
-
-    # at the root folder of SINGA
-    export LD_LIBRARY_PATH=.libs:$LD_LIBRARY_PATH
-
-Then, we can run SINGA as follows. 
-
-    # at the root folder of SINGA
-    ./bin/singa-run.sh -exec examples/rnnlm/rnnlm.bin -conf examples/rnnlm/job.conf
-
-You will see the values of loss and ppl at each training step.
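
A note on the reported ppl value: perplexity per word is 10 raised to the
negative mean log10-probability of the predicted words, which matches what
the deleted rnnlm.cc below accumulates (ppl_ sums log10 p, and ToString()
returns exp10(-ppl_ / num_)). A tiny self-contained sketch:

    #include <cmath>
    #include <vector>

    // ppl = 10^(-(1/N) * sum_t log10 p(w_t))
    float Perplexity(const std::vector<float>& word_probs) {
      double sum_log10 = 0.0;
      for (float p : word_probs) sum_log10 += std::log10(p);
      return static_cast<float>(
          std::pow(10.0, -sum_log10 / word_probs.size()));
    }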

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/create_data.cc
----------------------------------------------------------------------
diff --git a/examples/rnnlm/create_data.cc b/examples/rnnlm/create_data.cc
deleted file mode 100644
index d1edbdb..0000000
--- a/examples/rnnlm/create_data.cc
+++ /dev/null
@@ -1,444 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-/*
- * This file includes code from rnnlmlib-0.4 under the new BSD license.
- * Copyright (c) 2010-2012 Tomas Mikolov
- * Copyright (c) 2013 Cantab Research Ltd
- * All rights reserved.
- */
-
-
-//
-// This code creates DataShard for RNNLM dataset.
-// The RNNLM dataset could be downloaded at
-//    http://www.rnnlm.org/
-//
-// Usage:
-//    create_data.bin -train [train_file] -valid [valid_file]
-//                    -test [test_file] -class_size [# of classes]
-
-#include <cstring>
-#include <cstdlib>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <fstream>
-
-#include "singa/io/store.h"
-#include "singa/utils/common.h"
-#include "singa/proto/common.pb.h"
-#include "./rnnlm.pb.h"
-
-#define MAX_STRING 100
-#define BUFFER_LEN 32
-#define NL_STRING  "</s>"
-
-using std::string;
-using std::max;
-using std::min;
-
-struct vocab_word {
-  int cn;
-  char word[MAX_STRING];
-  int class_index;
-};
-
-struct vocab_word *vocab;
-int vocab_max_size;
-int vocab_size;
-int *vocab_hash;
-int vocab_hash_size;
-int debug_mode;
-int old_classes;
-int *class_start;
-int *class_end;
-int class_size;
-
-char train_file[MAX_STRING];
-char valid_file[MAX_STRING];
-char test_file[MAX_STRING];
-
-int valid_mode;
-int test_mode;
-
-unsigned int getWordHash(char *word) {
-  unsigned int hash, a;
-
-  hash = 0;
-  for (a = 0; a < strlen(word); a++) hash = hash * 237 + word[a];
-  hash = hash % vocab_hash_size;
-
-  return hash;
-}
-
-int searchVocab(char *word) {
-  int a;
-  unsigned int hash;
-
-  hash = getWordHash(word);
-
-  if (vocab_hash[hash] == -1) return -1;
-  if (!strcmp(word, vocab[vocab_hash[hash]].word)) return vocab_hash[hash];
-
-  for (a = 0; a < vocab_size; a++) {   // search in vocabulary
-    if (!strcmp(word, vocab[a].word)) {
-      vocab_hash[hash] = a;
-      return a;
-    }
-  }
-
-  return -1;   // return OOV if not found
-}
-
-int addWordToVocab(char *word) {
-  unsigned int hash;
-
-  snprintf(vocab[vocab_size].word, strlen(word)+1, "%s", word);
-  vocab[vocab_size].cn = 0;
-  vocab_size++;
-
-  if (vocab_size + 2 >= vocab_max_size) {   // reallocate memory if needed
-    vocab_max_size += 100;
-    vocab = (struct vocab_word *) realloc(
-        vocab,
-        vocab_max_size * sizeof(struct vocab_word));
-  }
-
-  hash = getWordHash(word);
-  vocab_hash[hash] = vocab_size - 1;
-
-  return vocab_size - 1;
-}
-
-void readWord(char *word, FILE *fin) {
-  int a = 0, ch;
-
-  while (!feof(fin)) {
-    ch = fgetc(fin);
-
-    if (ch == 13) continue;
-
-    if ((ch == ' ') || (ch == '\t') || (ch == '\n')) {
-      if (a > 0) {
-        if (ch == '\n') ungetc(ch, fin);
-        break;
-      }
-
-      if (ch == '\n') {
-        snprintf(word, strlen(NL_STRING) + 1,
-            "%s", const_cast<char *>(NL_STRING));
-        return;
-      } else {
-        continue;
-      }
-    }
-
-    word[a] = static_cast<char>(ch);
-    a++;
-
-    if (a >= MAX_STRING) {
-      // printf("Too long word found!\n");   //truncate too long words
-      a--;
-    }
-  }
-  word[a] = 0;
-}
-
-void sortVocab() {
-  int a, b, max;
-  vocab_word swap;
-
-  for (a = 1; a < vocab_size; a++) {
-    max = a;
-    for (b = a + 1; b < vocab_size; b++)
-      if (vocab[max].cn < vocab[b].cn) max = b;
-
-    swap = vocab[max];
-    vocab[max] = vocab[a];
-    vocab[a] = swap;
-  }
-}
-
-int learnVocabFromTrainFile() {
-  char word[MAX_STRING];
-  FILE *fin;
-  int a, i, train_wcn;
-
-  for (a = 0; a < vocab_hash_size; a++) vocab_hash[a] = -1;
-
-  fin = fopen(train_file, "rb");
-
-  vocab_size = 0;
-
-  addWordToVocab(const_cast<char *>(NL_STRING));
-
-  train_wcn = 0;
-  while (1) {
-    readWord(word, fin);
-    if (feof(fin)) break;
-
-    train_wcn++;
-
-    i = searchVocab(word);
-    if (i == -1) {
-      a = addWordToVocab(word);
-      vocab[a].cn = 1;
-    } else {
-      vocab[i].cn++;
-    }
-  }
-
-  sortVocab();
-
-  if (debug_mode > 0) {
-    printf("Vocab size: %d\n", vocab_size);
-    printf("Words in train file: %d\n", train_wcn);
-  }
-
-  fclose(fin);
-  return 0;
-}
-
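-// Partition the frequency-sorted vocabulary into class_size classes.
-// With old_classes set, class boundaries are cut by cumulative unigram
-// frequency; otherwise by cumulative sqrt-frequency, which balances the
-// classes better. class_start/class_end record each class's word range.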
-int splitClasses() {
-  double df, dd;
-  int i, a, b;
-
-  df = 0;
-  dd = 0;
-  a = 0;
-  b = 0;
-
-  class_start = reinterpret_cast<int *>(calloc(class_size, sizeof(int)));
-  memset(class_start, 0x7f, sizeof(int) * class_size);
-  class_end = reinterpret_cast<int *>(calloc(class_size, sizeof(int)));
-  memset(class_end, 0, sizeof(int) * class_size);
-
-  if (old_classes) {    // old classes
-    for (i = 0; i < vocab_size; i++)
-      b += vocab[i].cn;
-    for (i = 0; i < vocab_size; i++) {
-      df += vocab[i].cn / static_cast<double>(b);
-      if (df > 1) df = 1;
-      if (df > (a + 1) / static_cast<double>(class_size)) {
-        vocab[i].class_index = a;
-        if (a < class_size - 1) a++;
-      } else {
-        vocab[i].class_index = a;
-      }
-    }
-  } else {            // new classes
-    for (i = 0; i < vocab_size; i++)
-      b += vocab[i].cn;
-    for (i = 0; i < vocab_size; i++)
-      dd += sqrt(vocab[i].cn / static_cast<double>(b));
-    for (i = 0; i < vocab_size; i++) {
-      df += sqrt(vocab[i].cn / static_cast<double>(b)) / dd;
-      if (df > 1) df = 1;
-      if (df > (a + 1) / static_cast<double>(class_size)) {
-        vocab[i].class_index = a;
-        if (a < class_size - 1) a++;
-      } else {
-        vocab[i].class_index = a;
-      }
-    }
-  }
-
-  // after dividing classes, update class start and class end information
-  for (i = 0; i < vocab_size; i++)  {
-    a = vocab[i].class_index;
-    class_start[a] = min(i, class_start[a]);
-    class_end[a] = max(i + 1, class_end[a]);
-  }
-  return 0;
-}
-
-int init_class() {
-  // debug_mode = 1;
-  debug_mode = 0;
-  vocab_max_size = 100;  // initial capacity of vocab; grown in addWordToVocab
-  vocab_size = 0;
-  vocab = (struct vocab_word *) calloc(vocab_max_size,
-      sizeof(struct vocab_word));
-  vocab_hash_size = 100000000;
-  vocab_hash = reinterpret_cast<int *>(calloc(vocab_hash_size, sizeof(int)));
-  old_classes = 1;
-
-  // read vocab
-  learnVocabFromTrainFile();
-
-  // split classes
-  splitClasses();
-
-  return 0;
-}
-
-int create_data(const char *input_file, const char *output) {
-  auto* store = singa::io::OpenStore("kvfile", output, singa::io::kCreate);
-  WordRecord wordRecord;
-
-  FILE *fin;
-  int a, i;
-  fin = fopen(input_file, "rb");
-
-  int wcnt = 0;
-  char key[BUFFER_LEN];
-  char wordstr[MAX_STRING];
-  string value;
-  while (1) {
-    readWord(wordstr, fin);
-    if (feof(fin)) break;
-    i = searchVocab(wordstr);
-    if (i == -1) {
-      if (debug_mode) printf("unknown word [%s] detected!", wordstr);
-    } else {
-      wordRecord.set_word(string(wordstr));
-      wordRecord.set_word_index(i);
-      int class_idx = vocab[i].class_index;
-      wordRecord.set_class_index(class_idx);
-      wordRecord.set_class_start(class_start[class_idx]);
-      wordRecord.set_class_end(class_end[class_idx]);
-      int length = snprintf(key, BUFFER_LEN, "%05d", wcnt++);
-      wordRecord.SerializeToString(&value);
-      store->Write(string(key, length), value);
-    }
-  }
-
-  fclose(fin);
-  store->Flush();
-  delete store;
-  return 0;
-}
-
-int argPos(char *str, int argc, char **argv) {
-  int a;
-
-  for (a = 1; a < argc; a++)
-    if (!strcmp(str, argv[a]))
-      return a;
-
-  return -1;
-}
-
-int main(int argc, char **argv) {
-  int i;
-  FILE *f;
-
-  // set debug mode
-  i = argPos(const_cast<char *>("-debug"), argc, argv);
-  if (i > 0) {
-    debug_mode = 1;
-    if (debug_mode > 0)
-      printf("debug mode: %d\n", debug_mode);
-  }
-
-  // search for train file
-  i = argPos(const_cast<char *>("-train"), argc, argv);
-  if (i > 0) {
-    if (i + 1 == argc) {
-      printf("ERROR: training data file not specified!\n");
-      return 0;
-    }
-
-    snprintf(train_file, strlen(argv[i + 1])+1, "%s", argv[i + 1]);
-
-    if (debug_mode > 0)
-      printf("train file: %s\n", train_file);
-
-    f = fopen(train_file, "rb");
-    if (f == NULL) {
-      printf("ERROR: training data file not found!\n");
-      return 0;
-    }
-    fclose(f);
-  } else {
-    printf("ERROR: training data must be set.\n");
-  }
-
-  // search for valid file
-  i = argPos(const_cast<char *>("-valid"), argc, argv);
-  if (i > 0) {
-    if (i + 1 == argc) {
-      printf("ERROR: validating data file not specified!\n");
-      return 0;
-    }
-
-    snprintf(valid_file, strlen(argv[i + 1])+1, "%s", argv[i + 1]);
-
-    if (debug_mode > 0)
-      printf("valid file: %s\n", valid_file);
-
-    f = fopen(valid_file, "rb");
-    if (f == NULL) {
-      printf("ERROR: validating data file not found!\n");
-      return 0;
-    }
-    fclose(f);
-    valid_mode = 1;
-  }
-
-  // search for test file
-  i = argPos(const_cast<char *>("-test"), argc, argv);
-  if (i > 0) {
-    if (i + 1 == argc) {
-      printf("ERROR: testing data file not specified!\n");
-      return 0;
-    }
-
-    snprintf(test_file, strlen(argv[i + 1])+1, "%s", argv[i + 1]);
-
-    if (debug_mode > 0)
-      printf("test file: %s\n", test_file);
-
-    f = fopen(test_file, "rb");
-    if (f == NULL) {
-      printf("ERROR: testing data file not found!\n");
-      return 0;
-    }
-    fclose(f);
-    test_mode = 1;
-  }
-
-  // search for class size
-  i = argPos(const_cast<char *>("-class_size"), argc, argv);
-  if (i > 0) {
-    if (i + 1 == argc) {
-      printf("ERROR: class size not specified!\n");
-      return 0;
-    }
-
-    class_size = atoi(argv[i + 1]);
-
-    if (debug_mode > 0)
-      printf("class size: %d\n", class_size);
-  }
-  if (class_size <= 0) {
-    printf("ERROR: no or invalid class size received!\n");
-    return 0;
-  }
-
-  init_class();
-
-  create_data(train_file, "train_data.bin");
-  if (valid_mode) create_data(valid_file, "valid_data.bin");
-  if (test_mode) create_data(test_file, "test_data.bin");
-
-  return 0;
-}

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/job.conf
----------------------------------------------------------------------
diff --git a/examples/rnnlm/job.conf b/examples/rnnlm/job.conf
deleted file mode 100644
index aca1166..0000000
--- a/examples/rnnlm/job.conf
+++ /dev/null
@@ -1,120 +0,0 @@
-name: "rnnlm"
-#To scan the training file (81350) 10 times
-train_steps: 81350
-#To scan the validation file (6828) once
-validate_steps: 683
-validate_freq: 8135
-#disp_freq is specific to training
-disp_freq: 8135
-train_one_batch {
-  alg: kBP
-}
-updater{
-  type: kSGD
-  learning_rate {
-    type: kFixedStep
-    fixedstep_conf:{
-      step:0
-      step:48810
-      step:56945
-      step:65080
-      step:73215
-      step_lr:0.1
-      step_lr:0.05
-      step_lr:0.025
-      step_lr:0.0125
-      step_lr:0.00625
-    }
-  }
-}
-
-neuralnet {
-layer {
-  name: "data"
-  user_type: "kData"
-  [data_conf] {
-    backend: "kvfile"
-    path: "examples/rnnlm/train_data.bin"
-    max_window: 10
-  }
-  include: kTrain
-}
-
-layer {
-  name: "data"
-  user_type: "kData"
-  [data_conf] {
-    path: "examples/rnnlm/valid_data.bin"
-    max_window: 10
-  }
-  include: kVal
-}
-
-layer{
-  name: "embedding"
-  user_type: "kEmbedding"
-  srclayers: "data"
-  [embedding_conf] {
-    word_dim: 15
-    vocab_size: 3720
-  }
-  param {
-    name: "w1"
-    init {
-      type: kUniform
-      low:-0.3
-      high:0.3
-    }
-  }
-}
-
-layer{
-  name: "hidden"
-  user_type: "kHidden"
-  srclayers:"embedding"
-  param{
-    name: "w2"
-    init {
-      type: kUniform
-      low:-0.3
-      high:0.3
-    }
-  }
-}
-layer{
-  name: "loss"
-  user_type: "kLoss"
-  srclayers:"hidden"
-  srclayers:"data"
-  [loss_conf] {
-    nclass:100
-    vocab_size: 3720
-  }
-  param{
-    name: "w3"
-    init {
-      type: kUniform
-      low:-0.3
-      high:0.3
-    }
-  }
-  param{
-    name: "w4"
-    init {
-      type: kUniform
-      low:-0.3
-      high:0.3
-    }
-  }
-}
-
-}
-cluster {
-  nworker_groups: 1
-  nserver_groups: 1
-  nservers_per_group: 1
-  nworkers_per_group: 1
-  nservers_per_procs: 1
-  nworkers_per_procs: 1
-  workspace: "examples/rnnlm/"
-}
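
The kFixedStep schedule in the deleted job.conf pairs each step with a
step_lr: the learning rate is step_lr[i] for all steps in [step[i],
step[i+1]). A small sketch of that lookup (illustrative, not SINGA's updater
code):

    #include <cstddef>
    #include <vector>

    // Piecewise-constant schedule: steps is ascending, lrs has equal length.
    // Returns lrs[i] for the largest i with steps[i] <= step.
    float FixedStepLR(int step, const std::vector<int>& steps,
                      const std::vector<float>& lrs) {
      float lr = lrs.front();
      for (std::size_t i = 0; i < steps.size(); ++i)
        if (step >= steps[i]) lr = lrs[i];
      return lr;
    }
    // e.g. FixedStepLR(50000, {0, 48810, 56945, 65080, 73215},
    //                  {0.1f, 0.05f, 0.025f, 0.0125f, 0.00625f}) == 0.05f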

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/main.cc
----------------------------------------------------------------------
diff --git a/examples/rnnlm/main.cc b/examples/rnnlm/main.cc
deleted file mode 100644
index 9124383..0000000
--- a/examples/rnnlm/main.cc
+++ /dev/null
@@ -1,49 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#include <glog/logging.h>
-#include <string>
-#include "singa/singa.h"
-#include "rnnlm.h"
-#include "rnnlm.pb.h"
-
-int main(int argc, char **argv) {
-  // initialize glog before creating the driver
-  google::InitGoogleLogging(argv[0]);
-
-  singa::Driver driver;
-  driver.Init(argc, argv);
-
-  // if -resume in argument list, set resume to true; otherwise false
-  int resume_pos = singa::ArgPos(argc, argv, "-resume");
-  bool resume = (resume_pos != -1);
-
-  // register all layers for rnnlm
-  driver.RegisterLayer<rnnlm::EmbeddingLayer, std::string>("kEmbedding");
-  driver.RegisterLayer<rnnlm::HiddenLayer, std::string>("kHidden");
-  driver.RegisterLayer<rnnlm::LossLayer, std::string>("kLoss");
-  driver.RegisterLayer<rnnlm::DataLayer, std::string>("kData");
-
-  singa::JobProto jobConf = driver.job_conf();
-
-  driver.Train(resume, jobConf);
-  return 0;
-}

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/rnnlm.cc
----------------------------------------------------------------------
diff --git a/examples/rnnlm/rnnlm.cc b/examples/rnnlm/rnnlm.cc
deleted file mode 100644
index 641b465..0000000
--- a/examples/rnnlm/rnnlm.cc
+++ /dev/null
@@ -1,335 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-#include "./rnnlm.h"
-
-#include <string>
-#include <algorithm>
-#include "mshadow/tensor.h"
-#include "mshadow/tensor_expr.h"
-#include "mshadow/cxxnet_op.h"
-#include "./rnnlm.pb.h"
-
-namespace rnnlm {
-using std::vector;
-using std::string;
-
-using namespace mshadow;
-using mshadow::cpu;
-using mshadow::Shape;
-using mshadow::Shape1;
-using mshadow::Shape2;
-using mshadow::Tensor;
-
-inline Tensor<cpu, 2> RTensor2(Blob<float>* blob) {
-  const vector<int>& shape = blob->shape();
-  Tensor<cpu, 2> tensor(blob->mutable_cpu_data(),
-      Shape2(shape[0], blob->count() / shape[0]));
-  return tensor;
-}
-
-inline Tensor<cpu, 1> RTensor1(Blob<float>* blob) {
-  Tensor<cpu, 1> tensor(blob->mutable_cpu_data(), Shape1(blob->count()));
-  return tensor;
-}
-
-
-/*******DataLayer**************/
-DataLayer::~DataLayer() {
-  if (store_ != nullptr)
-    delete store_;
-}
-
-void DataLayer::Setup(const LayerProto& conf, const vector<Layer*>& srclayers) {
-  RNNLayer::Setup(conf, srclayers);
-  string key;
-  max_window_ = conf.GetExtension(data_conf).max_window();
-  data_.Reshape(vector<int>{max_window_ + 1, 4});
-  window_ = 0;
-}
-
-void SetInst(int k, const WordRecord& word, Blob<float>* to) {
-  float* dptr = to->mutable_cpu_data() + k * 4;
-  dptr[0] = static_cast<float>(word.word_index());
-  dptr[1] = static_cast<float>(word.class_index());
-  dptr[2] = static_cast<float>(word.class_start());
-  dptr[3] = static_cast<float>(word.class_end());
-}
-
-void ShiftInst(int from, int to,  Blob<float>* data) {
-  const float* f = data->cpu_data() + from * 4;
-  float* t = data->mutable_cpu_data() + to * 4;
-  // hard code the feature dim to be 4;
-  t[0] = f[0]; t[1] = f[1]; t[2] = f[2]; t[3] = f[3];
-}
-
-void DataLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
-  string key, value;
-  WordRecord word;
-  if (store_ == nullptr) {
-    store_ = singa::io::OpenStore(
-        layer_conf_.GetExtension(data_conf).backend(),
-        layer_conf_.GetExtension(data_conf).path(),
-        singa::io::kRead);
-    store_->Read(&key, &value);
-    word.ParseFromString(value);
-    SetInst(0, word, &data_);
-  }
-  ShiftInst(window_, 0, &data_);
-  window_ = max_window_;
-  for (int i = 1; i <= max_window_; i++) {
-    if (!store_->Read(&key, &value)) {
-      store_->SeekToFirst();
-      CHECK(store_->Read(&key, &value));
-    }
-    word.ParseFromString(value);
-    SetInst(i, word, &data_);
-    if (word.word_index() == 0) {
-      window_ = i;
-      break;
-    }
-  }
-}
-
-/*******LabelLayer**************
-void LabelLayer::Setup(const LayerProto& conf,
-    const vector<Layer*>& srclayers) {
-  RNNLayer::Setup(conf, srclayers);
-  CHECK_EQ(srclayers.size(), 1);
-  int max_window = dynamic_cast<DataLayer*>(srclayers[0])->max_window();
-  data_.Reshape(vector<int>{max_window, 4});
-}
-
-void LabelLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
-  const auto& records = dynamic_cast<DataLayer*>(srclayers[0])->records();
-  float *label = data_.mutable_cpu_data();
-  window_ = dynamic_cast<RNNLayer*>(srclayers[0])->window();
-  for (int i = 0; i < window_; i++) {
-    WordRecord wordrecord = records[i + 1].GetExtension(word);
-    label[4 * i + 0] = wordrecord.class_start();
-    label[4 * i + 1] = wordrecord.class_end();
-    label[4 * i + 2] = wordrecord.word_index();
-    label[4 * i + 3] = wordrecord.class_index();
-  }
-}
-*/
-
-/*******EmbeddingLayer**************/
-EmbeddingLayer::~EmbeddingLayer() {
-  delete embed_;
-}
-
-void EmbeddingLayer::Setup(const LayerProto& conf,
-    const vector<Layer*>& srclayers) {
-  RNNLayer::Setup(conf, srclayers);
-  CHECK_EQ(srclayers.size(), 1);
-  int max_window = srclayers[0]->data(this).shape()[0];
-  word_dim_ = conf.GetExtension(embedding_conf).word_dim();
-  data_.Reshape(vector<int>{max_window, word_dim_});
-  grad_.ReshapeLike(data_);
-  vocab_size_ = conf.GetExtension(embedding_conf).vocab_size();
-  embed_ = Param::Create(conf.param(0));
-  embed_->Setup(vector<int>{vocab_size_, word_dim_});
-}
-
-void EmbeddingLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
-  auto datalayer = dynamic_cast<DataLayer*>(srclayers[0]);
-  window_ = datalayer->window();
-  auto words = RTensor2(&data_);
-  auto embed = RTensor2(embed_->mutable_data());
-
-  const float* idxptr = datalayer->data(this).cpu_data();
-  for (int t = 0; t < window_; t++) {
-    int idx = static_cast<int>(idxptr[t * 4]);
-    CHECK_GE(idx, 0);
-    CHECK_LT(idx, vocab_size_);
-    Copy(words[t], embed[idx]);
-  }
-}
-
-void EmbeddingLayer::ComputeGradient(int flag,
-    const vector<Layer*>& srclayers) {
-  auto grad = RTensor2(&grad_);
-  auto gembed = RTensor2(embed_->mutable_grad());
-  auto datalayer = dynamic_cast<DataLayer*>(srclayers[0]);
-  gembed = 0;
-  const float* idxptr = datalayer->data(this).cpu_data();
-  for (int t = 0; t < window_; t++) {
-    int idx = static_cast<int>(idxptr[t * 4]);
-    Copy(gembed[idx], grad[t]);
-  }
-}
-/***********HiddenLayer**********/
-HiddenLayer::~HiddenLayer() {
-  delete weight_;
-}
-
-void HiddenLayer::Setup(const LayerProto& conf,
-    const vector<Layer*>& srclayers) {
-  RNNLayer::Setup(conf, srclayers);
-  CHECK_EQ(srclayers.size(), 1);
-  data_.ReshapeLike(srclayers[0]->data(this));
-  grad_.ReshapeLike(srclayers[0]->grad(this));
-  int word_dim = data_.shape()[1];
-  weight_ = Param::Create(conf.param(0));
-  weight_->Setup(std::vector<int>{word_dim, word_dim});
-}
-
-// hid[t] = sigmoid(hid[t-1] * W + src[t])
-void HiddenLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
-  window_ = dynamic_cast<RNNLayer*>(srclayers[0])->window();
-  auto data = RTensor2(&data_);
-  auto src = RTensor2(srclayers[0]->mutable_data(this));
-  auto weight = RTensor2(weight_->mutable_data());
-  for (int t = 0; t < window_; t++) {  // Skip the 1st component
-    if (t == 0) {
-      data[t] = expr::F<op::sigmoid>(src[t]);
-    } else {
-      data[t] = dot(data[t - 1], weight);
-      data[t] += src[t];
-      data[t] = expr::F<op::sigmoid>(data[t]);
-    }
-  }
-}
-
-void HiddenLayer::ComputeGradient(int flag, const vector<Layer*>& srclayers) {
-  auto data = RTensor2(&data_);
-  auto grad = RTensor2(&grad_);
-  auto weight = RTensor2(weight_->mutable_data());
-  auto gweight = RTensor2(weight_->mutable_grad());
-  auto gsrc = RTensor2(srclayers[0]->mutable_grad(this));
-  gweight = 0;
-  TensorContainer<cpu, 1> tmp(Shape1(data_.shape()[1]));
-  // BPTT: add the gradient flowing back from step t+1 through W, then
-  // scale by the sigmoid derivative at step t.
-  for (int t = window_ - 1; t >= 0; t--) {
-    if (t < window_ - 1) {
-      tmp = dot(grad[t + 1], weight.T());
-      grad[t] += tmp;
-    }
-    grad[t] = expr::F<op::sigmoid_grad>(data[t])* grad[t];
-  }
-  gweight = dot(data.Slice(0, window_-1).T(), grad.Slice(1, window_));
-  Copy(gsrc, grad);
-}
-
-/*********** Implementation for LossLayer **********/
-LossLayer::~LossLayer() {
-  delete word_weight_;
-  delete class_weight_;
-}
-
-void LossLayer::Setup(const LayerProto& conf, const vector<Layer*>& srclayers) {
-  RNNLayer::Setup(conf, srclayers);
-  CHECK_EQ(srclayers.size(), 2);
-  const auto& src = srclayers[0]->data(this);
-  int max_window = src.shape()[0];
-  int vdim = src.count() / max_window;   // Dimension of input
-  int vocab_size = conf.GetExtension(loss_conf).vocab_size();
-  int nclass = conf.GetExtension(loss_conf).nclass();
-  word_weight_ = Param::Create(conf.param(0));
-  word_weight_->Setup(vector<int>{vocab_size, vdim});
-  class_weight_ = Param::Create(conf.param(1));
-  class_weight_->Setup(vector<int>{nclass, vdim});
-
-  pword_.resize(max_window);
-  pclass_.Reshape(vector<int>{max_window, nclass});
-}
-
-void LossLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
-  window_ = dynamic_cast<RNNLayer*>(srclayers[0])->window();
-  auto pclass = RTensor2(&pclass_);
-  auto src = RTensor2(srclayers[0]->mutable_data(this));
-  auto word_weight = RTensor2(word_weight_->mutable_data());
-  auto class_weight = RTensor2(class_weight_->mutable_data());
-  const float * label = srclayers[1]->data(this).cpu_data();
-
-  float loss = 0.f, ppl = 0.f;
-  for (int t = 0; t < window_; t++) {
-    // label is the next word
-    int start = static_cast<int>(label[(t + 1) * 4 + 2]);
-    int end = static_cast<int>(label[(t + 1) * 4 + 3]);
-
-    auto wordWeight = word_weight.Slice(start, end);
-    CHECK_GT(end, start);
-    pword_[t].Reshape(std::vector<int>{end-start});
-    auto pword = RTensor1(&pword_[t]);
-    pword = dot(src[t], wordWeight.T());
-    Softmax(pword, pword);
-
-    pclass[t] = dot(src[t], class_weight.T());
-    Softmax(pclass[t], pclass[t]);
-
-    int wid = static_cast<int>(label[(t + 1) * 4 + 0]);
-    int cid = static_cast<int>(label[(t + 1) * 4 + 1]);
-    CHECK_GT(end, wid);
-    CHECK_GE(wid, start);
-    loss_ += -log(std::max(pword[wid - start] * pclass[t][cid], FLT_MIN));
-    ppl_ += log10(std::max(pword[wid - start] * pclass[t][cid], FLT_MIN));
-  }
-  num_ += window_;
-}
-
-void LossLayer::ComputeGradient(int flag, const vector<Layer*>& srclayers) {
-  auto pclass = RTensor2(&pclass_);
-  auto src = RTensor2(srclayers[0]->mutable_data(this));
-  auto gsrc = RTensor2(srclayers[0]->mutable_grad(this));
-  auto word_weight = RTensor2(word_weight_->mutable_data());
-  auto gword_weight = RTensor2(word_weight_->mutable_grad());
-  auto class_weight = RTensor2(class_weight_->mutable_data());
-  auto gclass_weight = RTensor2(class_weight_->mutable_grad());
-  const float * label = srclayers[1]->data(this).cpu_data();
-  gclass_weight = 0;
-  gword_weight = 0;
-  for (int t = 0; t < window_; t++) {
-    int start = static_cast<int>(label[(t + 1) * 4 + 2]);
-    int end = static_cast<int>(label[(t + 1) * 4 + 3]);
-    int wid = static_cast<int>(label[(t + 1) * 4 + 0]);
-    int cid = static_cast<int>(label[(t + 1) * 4 + 1]);
-    auto pword = RTensor1(&pword_[t]);
-    CHECK_GT(end, wid);
-    CHECK_GE(wid, start);
-
-    // gL/gclass_act
-    pclass[t][cid] -= 1.0;
-    // gL/gword_act
-    pword[wid - start] -= 1.0;
-
-    // gL/gword_weight
-    gword_weight.Slice(start, end) += dot(pword.FlatTo2D().T(),
-                                          src[t].FlatTo2D());
-    // gL/gclass_weight
-    gclass_weight += dot(pclass[t].FlatTo2D().T(),
-                         src[t].FlatTo2D());
-
-    gsrc[t] = dot(pword, word_weight.Slice(start, end));
-    gsrc[t] += dot(pclass[t], class_weight);
-  }
-}
-
-const std::string LossLayer::ToString(bool debug, int flag) {
-  float loss = loss_ / num_;
-  float ppl = exp10(- ppl_ / num_);
-  loss_ = 0;
-  num_ = 0;
-  ppl_ = 0;
-  return "loss = " + std::to_string(loss) + ", ppl = " + std::to_string(ppl);
-}
-}   // end of namespace rnnlm

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/rnnlm.h
----------------------------------------------------------------------
diff --git a/examples/rnnlm/rnnlm.h b/examples/rnnlm/rnnlm.h
deleted file mode 100644
index 0e415e3..0000000
--- a/examples/rnnlm/rnnlm.h
+++ /dev/null
@@ -1,158 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-#ifndef EXAMPLES_RNNLM_RNNLM_H_
-#define EXAMPLES_RNNLM_RNNLM_H_
-
-#include <string>
-#include <vector>
-#include "singa/singa.h"
-#include "./rnnlm.pb.h"
-
-namespace rnnlm {
-using std::vector;
-using singa::LayerProto;
-using singa::Layer;
-using singa::Param;
-using singa::Blob;
-using singa::Metric;
-/**
- * Base RNN layer. May make it a base layer of SINGA.
- */
-class RNNLayer : virtual public singa::Layer {
- public:
-  /**
-   * The recurrent layers may be unrolled a different number of times for
-   * different iterations, depending on the application. For example, the
-   * ending word of a sentence may stop the unrolling; unrolling also stops
-   * when the max
-   * window size is reached. Every layer must reset window_ in its
-   * ComputeFeature function.
-   *
-   * @return the effective BPTT length, which is <= max_window.
-   */
-  inline int window() { return window_; }
-
- protected:
-  //!< effective window size for BPTT
-  int window_;
-};
-
-/**
- * Input layer that reads records from the data shard
- */
-class DataLayer : public RNNLayer, public singa::InputLayer {
- public:
-  ~DataLayer();
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  int max_window() const {
-    return max_window_;
-  }
-
- private:
-  int max_window_;
-  singa::io::Store* store_ = nullptr;
-};
-
-
-/**
- * LabelLayer that reads records_[1] to records_[window_] from DataLayer to
- * offer label information
-class LabelLayer : public RNNLayer {
- public:
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override {}
-};
- */
-
-
-/**
- * Word embedding layer that gets one row from the embedding matrix for each
- * word based on the word index
- */
-class EmbeddingLayer : public RNNLayer {
- public:
-  ~EmbeddingLayer();
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-  const std::vector<Param*> GetParams() const override {
-    std::vector<Param*> params{embed_};
-    return params;
-  }
-
-
- private:
-  int word_dim_;
-  int vocab_size_;
-  //!< word embedding matrix of size vocab_size_ x word_dim_
-  Param* embed_;
-};
-
-
-/**
- * hid[t] = sigmoid(hid[t-1] * W + src[t])
- */
-class HiddenLayer : public RNNLayer {
- public:
-  ~HiddenLayer();
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-
-  const std::vector<Param*> GetParams() const override {
-    std::vector<Param*> params{weight_};
-    return params;
-  }
-
-
- private:
-  Param* weight_;
-};
-
-/**
- * p(word at t+1 is from class c) = softmax(src[t]*Wc)[c]
- * p(w|c) = softmax(src[t]*Ww[Start(c):End(c)])
- * p(word at t+1 is w)=p(word at t+1 is from class c)*p(w|c)
- */
-class LossLayer : public RNNLayer {
- public:
-  ~LossLayer();
-  void Setup(const LayerProto& conf, const vector<Layer*>& srclayers) override;
-  void ComputeFeature(int flag, const vector<Layer*>& srclayers) override;
-  void ComputeGradient(int flag, const vector<Layer*>& srclayers) override;
-
-  const std::string ToString(bool debug, int flag) override;
-  const std::vector<Param*> GetParams() const override {
-    std::vector<Param*> params{word_weight_, class_weight_};
-    return params;
-  }
-
- private:
-  std::vector<Blob<float>> pword_;
-  Blob<float> pclass_;
-  Param* word_weight_, *class_weight_;
-  float loss_, ppl_;
-  int num_;
-};
-}  // namespace rnnlm
-#endif  // EXAMPLES_RNNLM_RNNLM_H_

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/dd1e4afa/examples/rnnlm/rnnlm.proto
----------------------------------------------------------------------
diff --git a/examples/rnnlm/rnnlm.proto b/examples/rnnlm/rnnlm.proto
deleted file mode 100644
index 4a4dcbc..0000000
--- a/examples/rnnlm/rnnlm.proto
+++ /dev/null
@@ -1,53 +0,0 @@
-/************************************************************
-*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*   http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied.  See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*
-*************************************************************/
-
-import "job.proto";
-import "common.proto";
-
-message EmbeddingProto {
-  optional int32 word_dim = 1;
-  optional int32 vocab_size = 2;
-}
-
-message LossProto {
-  optional int32 nclass = 1;
-  optional int32 vocab_size = 2;
-}
-
-message DataProto {
-  required string path = 1;
-  optional int32 max_window = 2;
-  optional string backend = 3 [default = "kvfile"];
-}
-
-extend singa.LayerProto {
-  optional EmbeddingProto embedding_conf = 1001;
-  optional LossProto loss_conf = 1002;
-  optional DataProto data_conf = 1003;
-}
-
-message WordRecord {
-  optional string word = 1;
-  optional int32 word_index = 2;
-  optional int32 class_index = 3;
-  optional int32 class_start = 4;
-  optional int32 class_end = 5;
-}