Posted to commits@singa.apache.org by wa...@apache.org on 2015/08/19 11:00:01 UTC

incubator-singa git commit: SINGA-61 Support user defined classes

Repository: incubator-singa
Updated Branches:
  refs/heads/master 14be10156 -> 97141e2e0


SINGA-61 Support user defined classes

To support user-defined classes, we need a type field for each class in the configuration.
E.g., LayerProto has a type field, but it can only be set to built-in layer types.
This commit adds a user_type field for user-defined layers. E.g.,

    message LayerProto {
      optional string user_type = 21;
    }

A helper function Layer::Create(const LayerProto&) is added to create a Layer object based on the configured type.

Similar updates are applied to the other classes, namely Param, Worker, and Updater.
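
With these fields in place, a user-defined layer can be selected purely from the
job configuration. As a sketch, a layer block in job.conf could look like this
(the layer name and the "foo" type string are hypothetical):

    layer {
      name: "foo1"
      # selects the user-defined layer registered under the string "foo"
      user_type: "foo"
    }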


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/97141e2e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/97141e2e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/97141e2e

Branch: refs/heads/master
Commit: 97141e2e0e81045a6bbedee14d88e98a4ad6c136
Parents: 14be101
Author: Wei Wang <wa...@comp.nus.edu.sg>
Authored: Tue Aug 18 21:16:40 2015 +0800
Committer: Wei Wang <wa...@comp.nus.edu.sg>
Committed: Wed Aug 19 16:24:19 2015 +0800

----------------------------------------------------------------------
 examples/cifar10/Makefile.example |   2 +-
 examples/cifar10/job.conf         |   2 -
 examples/mnist/Makefile.example   |   2 +-
 include/driver.h                  | 109 +++++++++++++++++++++++++++++++++
 include/neuralnet/base_layer.h    |   2 +
 include/singa.h                   |  82 +------------------------
 include/trainer/worker.h          |   1 +
 include/utils/param.h             |   1 +
 include/utils/updater.h           |   1 +
 src/driver.cc                     |  72 ++++++++--------------
 src/neuralnet/base_layer.cc       |   9 +++
 src/neuralnet/layer.cc            |  21 +++----
 src/neuralnet/neuralnet.cc        |   9 ++-
 src/proto/common.proto            |   3 +-
 src/proto/job.proto               |  92 ++++++++++++++++++++--------
 src/trainer/server.cc             |   2 +-
 src/trainer/trainer.cc            |   3 +-
 src/trainer/worker.cc             |   9 +++
 src/utils/param.cc                |  11 ++++
 src/utils/updater.cc              |  11 ++++
 20 files changed, 265 insertions(+), 179 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/examples/cifar10/Makefile.example
----------------------------------------------------------------------
diff --git a/examples/cifar10/Makefile.example b/examples/cifar10/Makefile.example
index 5d3fa21..744b256 100644
--- a/examples/cifar10/Makefile.example
+++ b/examples/cifar10/Makefile.example
@@ -9,7 +9,7 @@ cifar-10-binary-bin:
 	tar xf cifar-10-binary.tar.gz
 
 create:
-	$(CXX) create_shard.cc -std=c++11 -lsinga -lprotobuf -lglog -lzookeeper_mt \
+	$(CXX) create_shard.cc -std=c++11 -lsinga -lprotobuf -lglog \
 		-I../../include -L../../.libs/ -Wl,-unresolved-symbols=ignore-in-shared-libs \
 		-Wl,-rpath=../../.libs/  -o create_shard.bin
 	mkdir cifar10_train_shard

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/examples/cifar10/job.conf
----------------------------------------------------------------------
diff --git a/examples/cifar10/job.conf b/examples/cifar10/job.conf
index fdf6167..b294f03 100644
--- a/examples/cifar10/job.conf
+++ b/examples/cifar10/job.conf
@@ -94,7 +94,6 @@ neuralnet {
     name: "norm1"
     type: kLRN
     lrn_conf {
-      norm_region: WITHIN_CHANNEL
       local_size: 3
       alpha: 5e-05
       beta: 0.75
@@ -143,7 +142,6 @@ neuralnet {
     name: "norm2"
     type: kLRN
     lrn_conf {
-      norm_region: WITHIN_CHANNEL
       local_size: 3
       alpha: 5e-05
       beta: 0.75

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/examples/mnist/Makefile.example
----------------------------------------------------------------------
diff --git a/examples/mnist/Makefile.example b/examples/mnist/Makefile.example
index 87399fe..9016887 100644
--- a/examples/mnist/Makefile.example
+++ b/examples/mnist/Makefile.example
@@ -13,7 +13,7 @@ mnist:
 	gunzip t10k-images-idx3-ubyte.gz && gunzip t10k-labels-idx1-ubyte.gz
 
 create:
-	$(CXX) create_shard.cc -std=c++11 -lsinga -lprotobuf -lglog -lzookeeper_mt -I../../include \
+	$(CXX) create_shard.cc -std=c++11 -lsinga -lprotobuf -lglog -I../../include \
 		-L../../.libs/ -Wl,-unresolved-symbols=ignore-in-shared-libs -Wl,-rpath=../../.libs/ \
 		-o create_shard.bin
 	mkdir mnist_train_shard

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/include/driver.h
----------------------------------------------------------------------
diff --git a/include/driver.h b/include/driver.h
new file mode 100644
index 0000000..fcaab12
--- /dev/null
+++ b/include/driver.h
@@ -0,0 +1,109 @@
+#ifndef SINGA_DRIVER_H_
+#define SINGA_DRIVER_H_
+#include "singa.h"
+
+namespace singa {
+
+class Driver {
+ public:
+  /**
+   * Init SINGA: initialize glog, parse the job id and job conf from the
+   * command line, and register built-in layer, worker, updater and param
+   * subclasses.
+   * May also do MPI init if MPI is used for message passing.
+   */
+  void Init(int argc, char** argv);
+  /**
+   * Register a Layer subclass.
+   *
+   * @param type layer type ID. If called to register a built-in subclass,
+   * it is a value from LayerType; if called to register a user-defined
+   * subclass, it is a string.
+   * @return 1 on success.
+   */
+  template<typename Subclass, typename Type>
+  int RegisterLayer(const Type& type);
+  /**
+   * Register an Updater subclass.
+   *
+   * @param type ID of the subclass. If called to register a built-in
+   * subclass, it is a value from UpdaterType; if called to register a
+   * user-defined subclass, it is a string.
+   * @return 1 on success.
+   */
+  template<typename Subclass, typename Type>
+  int RegisterUpdater(const Type& type);
+  /**
+   * Register a Worker subclass.
+   *
+   * @param type ID of the subclass. If called to register a built-in
+   * subclass, it is a value from TrainOneBatchAlg; if called to register a
+   * user-defined subclass, it is a string.
+   * @return 1 on success.
+   */
+  template<typename Subclass, typename Type>
+  int RegisterWorker(const Type& type);
+  /**
+   * Register a Param subclass.
+   * @param type ID of the subclass. If called to register a built-in
+   * subclass, it is a value from ParamType; if called to register a
+   * user-defined subclass, it is a string.
+   *
+   * @return 1 on success.
+   */
+  template<typename Subclass, typename Type>
+  int RegisterParam(const Type& type);
+  /**
+   * Submit the job configuration for starting the job.
+   * @param resume resume from last checkpoint if true.
+   * @param job job configuration
+   */
+  void Submit(bool resume, const JobProto& job);
+  /**
+   * @return job ID which is generated by zookeeper and passed in by the
+   * launching script.
+   */
+  inline int job_id() const { return job_id_; }
+  /**
+   * @return job configuration read from the path passed by users at the
+   * command line. It should at least contain the cluster configuration.
+   */
+  inline JobProto job_conf() const { return job_conf_; }
+
+ private:
+  int job_id_;
+  JobProto job_conf_;
+  SingaProto singa_conf_;
+};
+
+template<typename Subclass, typename Type>
+int Driver::RegisterLayer(const Type& type) {
+  auto factory = Singleton<Factory<singa::Layer>>::Instance();
+  factory->Register(type, CreateInstance(Subclass, Layer));
+  return 1;
+}
+
+template<typename Subclass, typename Type>
+int Driver::RegisterParam(const Type& type) {
+  auto factory = Singleton<Factory<singa::Param>>::Instance();
+  factory->Register(type, CreateInstance(Subclass, Param));
+  return 1;
+}
+template<typename Subclass, typename Type>
+int Driver::RegisterUpdater(const Type& type) {
+  auto factory = Singleton<Factory<singa::Updater>>::Instance();
+  factory->Register(type, CreateInstance(Subclass, Updater));
+  return 1;
+}
+template<typename Subclass, typename Type>
+int Driver::RegisterWorker(const Type& type) {
+  auto factory = Singleton<Factory<singa::Worker>>::Instance();
+  factory->Register(type, CreateInstance(Subclass, Worker));
+  return 1;
+}
+
+
+}  // namespace singa
+#endif  // SINGA_DRIVER_H_
+
+

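With driver.h in place, a user program registers its own subclasses before
submitting the job. A minimal sketch, assuming a default-constructible layer
subclass (FooLayer and the type string "foo" are hypothetical names used only
for illustration):

    // main.cc -- sketch of a user program built on the Driver API
    #include <string>
    #include "singa.h"

    // A hypothetical user-defined layer. For brevity it just reuses the
    // built-in ReLULayer; a real subclass would override Setup() and the
    // feature/gradient computation methods.
    class FooLayer : public singa::ReLULayer {};

    int main(int argc, char** argv) {
      singa::Driver driver;
      driver.Init(argc, argv);
      // layers configured with user_type: "foo" will be created as FooLayer
      driver.RegisterLayer<FooLayer, std::string>("foo");
      driver.Submit(false /* resume */, driver.job_conf());
      return 0;
    }
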
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/include/neuralnet/base_layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/base_layer.h b/include/neuralnet/base_layer.h
index 508fe18..5575fc7 100644
--- a/include/neuralnet/base_layer.h
+++ b/include/neuralnet/base_layer.h
@@ -21,6 +21,7 @@ using std::string;
 using std::map;
 
 class Layer;
+
 /**
  * Base layer class.
  *
@@ -30,6 +31,7 @@ class Layer;
  */
 class Layer {
  public:
+  static Layer *Create(const LayerProto& proto);
   Layer() { }
   virtual ~Layer() {}
   /**

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/include/singa.h
----------------------------------------------------------------------
diff --git a/include/singa.h b/include/singa.h
index 20f941d..a98af2b 100644
--- a/include/singa.h
+++ b/include/singa.h
@@ -1,11 +1,9 @@
 #ifndef SINGA_SINGA_H_
 #define SINGA_SINGA_H_
 
-#include <cblas.h>
-#include <glog/logging.h>
-#include <string>
 #include "communication/socket.h"
 #include "neuralnet/neuralnet.h"
+#include "neuralnet/layer.h"
 #include "proto/job.pb.h"
 #include "proto/singa.pb.h"
 #include "trainer/trainer.h"
@@ -13,82 +11,6 @@
 #include "utils/param.h"
 #include "utils/singleton.h"
 #include "utils/factory.h"
-
-namespace singa {
-
-class Driver {
- public:
-  /**
-   * Init SINGA, including init glog, parse job id and job conf from cmd line,
-   * and register built-in layer, worker, updater, param subclasses.
-   *
-   * May be used for MPI init if it is used for message passing.
-   */
-  void Init(int argc, char** argv);
-  /**
-   * Register a Layer subclass.
-   *
-   * T is the subclass.
-   * @param type layer type ID. If called by users, it should be different to
-   * the types of built-in layers.
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename T>
-  int RegisterLayer(int type);
-  /**
-   * Register Updater subclasses.
-   *
-   * T is the subclass.
-   * @param type updater type ID. If called by users, it should be different to
-   * the types of built-in updaters.
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename T>
-  int RegisterUpdater(int type);
-  /**
-   * Register Worker subclasses.
-   *
-   * T is the subclass.
-   * @param type worker type ID. If called by users, it should be different to
-   * the types of built-in workers
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename T>
-  int RegisterWorker(int type);
-  /**
-   * Register Param subclasses.
-   *
-   * T is the subclass.
-   * @param type param type. If called by users, it should be different to the
-   * types of built-in params. SINGA currently provides only one built-in Param
-   * implementation whose type ID is 0.
-   * @return 0 if success; otherwise -1.
-   */
-  template<typename T>
-  int RegisterParam(int type);
-  /**
-   * Submit the job configuration for starting the job.
-   * @param resume resume from last checkpoint if true.
-   * @param job job configuration
-   */
-  void Submit(bool resume, const JobProto& job);
-  /**
-   * @return job ID which is generated by zookeeper and passed in by the
-   * launching script.
-   */
-  inline int job_id() const { return job_id_; }
-  /**
-   * @return job conf path which is passed by users at the command line. It
-   * should at least contains the cluster configuration.
-   */
-  inline JobProto job_conf() const { return job_conf_; }
-
- private:
-  int job_id_;
-  JobProto job_conf_;
-  SingaProto singa_conf_;
-};
-
-}  // namespace singa
+#include "driver.h"
 
 #endif  // SINGA_SINGA_H_

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/include/trainer/worker.h
----------------------------------------------------------------------
diff --git a/include/trainer/worker.h b/include/trainer/worker.h
index 2adbc66..c50b54f 100644
--- a/include/trainer/worker.h
+++ b/include/trainer/worker.h
@@ -21,6 +21,7 @@ const int kCollectSleepTime=5;
  */
 class Worker {
  public:
+  static Worker* Create(const JobProto& proto);
   /**
    * @param thread_id local thread index within the procs
    * @param grp_id global worker group ID

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/include/utils/param.h
----------------------------------------------------------------------
diff --git a/include/utils/param.h b/include/utils/param.h
index be465f4..83f64ed 100644
--- a/include/utils/param.h
+++ b/include/utils/param.h
@@ -27,6 +27,7 @@
 namespace singa {
 class Param {
  public:
+  static Param* Create(const ParamProto& proto);
   Param();
   virtual ~Param() {}
   /**

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/include/utils/updater.h
----------------------------------------------------------------------
diff --git a/include/utils/updater.h b/include/utils/updater.h
index 99629cf..92ddf6c 100644
--- a/include/utils/updater.h
+++ b/include/utils/updater.h
@@ -10,6 +10,7 @@ namespace singa {
  */
 class Updater{
  public:
+  static Updater* Create(const UpdaterProto& proto);
   virtual ~Updater() {}
   virtual void Init(const UpdaterProto &proto) {
     proto_ = proto;

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/driver.cc
----------------------------------------------------------------------
diff --git a/src/driver.cc b/src/driver.cc
index 6d41fc5..b79b609 100644
--- a/src/driver.cc
+++ b/src/driver.cc
@@ -1,5 +1,9 @@
 #include "singa.h"
 
+#include <cblas.h>
+#include <glog/logging.h>
+#include <string>
+
 namespace singa {
 
 void Driver::Init(int argc, char **argv) {
@@ -22,28 +26,28 @@ void Driver::Init(int argc, char **argv) {
   ReadProtoFromTextFile(argv[arg_pos+1], &job_conf_);
 
   // register layers
-  RegisterLayer<BridgeDstLayer>(kBridgeDst);
-  RegisterLayer<BridgeSrcLayer>(kBridgeSrc);
-  RegisterLayer<ConvolutionLayer>(kConvolution);
-  RegisterLayer<ConcateLayer>(kConcate);
-  RegisterLayer<DropoutLayer>(kDropout);
-  RegisterLayer<InnerProductLayer>(kInnerProduct);
-  RegisterLayer<LabelLayer>(kLabel);
-  RegisterLayer<LRNLayer>(kLRN);
-  RegisterLayer<MnistLayer>(kMnist);
-  RegisterLayer<PrefetchLayer>(kPrefetch);
-  RegisterLayer<PoolingLayer>(kPooling);
-  RegisterLayer<RGBImageLayer>(kRGBImage);
-  RegisterLayer<ReLULayer>(kReLU);
-  RegisterLayer<ShardDataLayer>(kShardData);
-  RegisterLayer<SliceLayer>(kSlice);
-  RegisterLayer<SoftmaxLossLayer>(kSoftmaxLoss);
-  RegisterLayer<SplitLayer>(kSplit);
-  RegisterLayer<TanhLayer>(kTanh);
-  RegisterLayer<RBMVisLayer>(kRBMVis);
-  RegisterLayer<RBMHidLayer>(kRBMHid);
+  RegisterLayer<BridgeDstLayer, int>(kBridgeDst);
+  RegisterLayer<BridgeSrcLayer, int>(kBridgeSrc);
+  RegisterLayer<ConvolutionLayer, int>(kConvolution);
+  RegisterLayer<ConcateLayer, int>(kConcate);
+  RegisterLayer<DropoutLayer, int>(kDropout);
+  RegisterLayer<InnerProductLayer, int>(kInnerProduct);
+  RegisterLayer<LabelLayer, int>(kLabel);
+  RegisterLayer<LRNLayer, int>(kLRN);
+  RegisterLayer<MnistLayer, int>(kMnist);
+  RegisterLayer<PrefetchLayer, int>(kPrefetch);
+  RegisterLayer<PoolingLayer, int>(kPooling);
+  RegisterLayer<RGBImageLayer, int>(kRGBImage);
+  RegisterLayer<ReLULayer, int>(kReLU);
+  RegisterLayer<ShardDataLayer, int>(kShardData);
+  RegisterLayer<SliceLayer, int>(kSlice);
+  RegisterLayer<SoftmaxLossLayer, int>(kSoftmaxLoss);
+  RegisterLayer<SplitLayer, int>(kSplit);
+  RegisterLayer<TanhLayer, int>(kTanh);
+  RegisterLayer<RBMVisLayer, int>(kRBMVis);
+  RegisterLayer<RBMHidLayer, int>(kRBMHid);
 #ifdef USE_LMDB
-  RegisterLayer<LMDBDataLayer>(kLMDBData);
+  RegisterLayer<LMDBDataLayer, int>(kLMDBData);
 #endif
 
   // register updater
@@ -60,33 +64,7 @@ void Driver::Init(int argc, char **argv) {
   RegisterParam<Param>(0);
 }
 
-template<typename T>
-int Driver::RegisterLayer(int type) {
-  auto factory = Singleton<Factory<singa::Layer>>::Instance();
-  factory->Register(type, CreateInstance(T, Layer));
-  return 1;
-}
-
-template<typename T>
-int Driver::RegisterParam(int type) {
-  auto factory = Singleton<Factory<singa::Param>>::Instance();
-  factory->Register(type, CreateInstance(T, Param));
-  return 1;
-}
 
-template<typename T>
-int Driver::RegisterUpdater(int type) {
-  auto factory = Singleton<Factory<singa::Updater>>::Instance();
-  factory->Register(type, CreateInstance(T, Updater));
-  return 1;
-}
-
-template<typename T>
-int Driver::RegisterWorker(int type) {
-  auto factory = Singleton<Factory<singa::Worker>>::Instance();
-  factory->Register(type, CreateInstance(T, Worker));
-  return 1;
-}
 
 void Driver::Submit(bool resume, const JobProto& jobConf) {
   if (singa_conf_.has_log_dir())

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/neuralnet/base_layer.cc
----------------------------------------------------------------------
diff --git a/src/neuralnet/base_layer.cc b/src/neuralnet/base_layer.cc
index 695104e..f995353 100644
--- a/src/neuralnet/base_layer.cc
+++ b/src/neuralnet/base_layer.cc
@@ -9,6 +9,15 @@
 #include "neuralnet/base_layer.h"
 
 namespace singa {
+Layer *Layer::Create(const LayerProto& proto) {
+  auto* factory = Singleton<Factory<Layer>>::Instance();
+  Layer * layer = nullptr;
+  if (proto.has_user_type())
+    layer = factory->Create(proto.user_type());
+  else
+    layer = factory->Create(proto.type());
+  return layer;
+}
 
 void Layer::Setup(const LayerProto& proto, int npartitions) {
   CHECK_GE(npartitions, 1);
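
Layer::Create simply looks the configured type up in the singleton
Factory<Layer>, keyed either by the LayerType enum value or by the user_type
string. The Factory template itself is not part of this diff; the sketch below
is only an illustrative approximation of the pattern, not the actual
utils/factory.h:

    // Illustrative approximation of Factory<T>: built-in classes register
    // under their enum value, user-defined classes under a string key.
    #include <functional>
    #include <map>
    #include <string>

    template <typename T>
    class FactorySketch {
     public:
      using Creator = std::function<T*()>;
      void Register(int type, Creator c) { int_creators_[type] = c; }
      void Register(const std::string& type, Creator c) {
        str_creators_[type] = c;
      }
      T* Create(int type) { return int_creators_.at(type)(); }
      T* Create(const std::string& type) { return str_creators_.at(type)(); }
     private:
      std::map<int, Creator> int_creators_;
      std::map<std::string, Creator> str_creators_;
    };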

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/neuralnet/layer.cc
----------------------------------------------------------------------
diff --git a/src/neuralnet/layer.cc b/src/neuralnet/layer.cc
index 5c3d688..ae45ae8 100644
--- a/src/neuralnet/layer.cc
+++ b/src/neuralnet/layer.cc
@@ -5,7 +5,6 @@
 #include "mshadow/cxxnet_op.h"
 #include "neuralnet/layer.h"
 #include "utils/singleton.h"
-#include "utils/factory.h"
 
 using namespace mshadow;
 using namespace mshadow::expr;
@@ -71,10 +70,9 @@ void ConvolutionLayer::Setup(const LayerProto& proto, int npartitions) {
   col_data_.Reshape(vector<int>{col_height_, col_width_});
   col_grad_.Reshape(vector<int>{col_height_, col_width_});
 
-  Factory<Param>* factory=Singleton<Factory<Param>>::Instance();
-  weight_ = factory->Create(proto.param(0).type());
+  weight_ = Param::Create(proto.param(0));
+  bias_ = Param::Create(proto.param(1));
   weight_->Setup(proto.param(0), vector<int>{num_filters_, col_height_});
-  bias_ = factory->Create(proto.param(1).type());
   bias_->Setup(proto.param(1), vector<int>{num_filters_});
 }
 
@@ -188,9 +186,8 @@ void RBMVisLayer::Setup(const LayerProto& proto,
   hdim_ = proto.rbmvis_conf().num_output();
   data_.Reshape(vector<int>{batchsize_, vdim_});  // this is visible dimension
   vis_sample_.Reshape(vector<int>{neg_batchsize_, vdim_});
-  Factory<Param>* factory = Singleton<Factory<Param>>::Instance();
-  weight_ = factory->Create(proto.param(0).type());
-  bias_ = factory->Create(proto.param(1).type());
+  weight_ = Param::Create(proto.param(0));
+  bias_ = Param::Create(proto.param(1));
   weight_->Setup(proto.param(0), vector<int>{vdim_, hdim_});
   bias_->Setup(proto.param(1), vector<int>{vdim_});
 }
@@ -281,9 +278,8 @@ void RBMHidLayer::Setup(const LayerProto& proto,
   hdim_ = proto.rbmhid_conf().hid_dim();
   data_.Reshape(vector<int>{batchsize_, hdim_});
   hid_sample_.Reshape(vector<int>{neg_batchsize_, hdim_});
-  Factory<Param>* factory = Singleton<Factory<Param>>::Instance();
-  weight_ = factory->Create(proto.param(0).type());
-  bias_ = factory->Create(proto.param(0).type());
+  weight_ = Param::Create(proto.param(0));
+  bias_ = Param::Create(proto.param(1));
   weight_->Setup(proto.param(0), vector<int>{vdim_, hdim_});
   bias_->Setup(proto.param(1), vector<int>{hdim_});
 }
@@ -338,9 +334,8 @@ void InnerProductLayer::Setup(const LayerProto& proto, int npartitions) {
     hdim_ /= npartitions;
   data_.Reshape(vector<int>{batchsize_, hdim_});
   grad_.ReshapeLike(data_);
-  Factory<Param>* factory=Singleton<Factory<Param>>::Instance();
-  weight_ = factory->Create(proto.param(0).type());
-  bias_ = factory->Create(proto.param(0).type());
+  weight_ = Param::Create(proto.param(0));
+  bias_ = Param::Create(proto.param(1));
   weight_->Setup(proto.param(0), vector<int>{hdim_, vdim_});
   bias_->Setup(proto.param(1), vector<int>{hdim_});
 }

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/neuralnet/neuralnet.cc
----------------------------------------------------------------------
diff --git a/src/neuralnet/neuralnet.cc b/src/neuralnet/neuralnet.cc
index 08e8e0f..c2e29a0 100644
--- a/src/neuralnet/neuralnet.cc
+++ b/src/neuralnet/neuralnet.cc
@@ -75,10 +75,10 @@ NeuralNet::NeuralNet(NetProto netproto, int npartitions) {
 }
 
 void NeuralNet::CreateNetFromGraph(Graph* graph, int npartitions) {
-  auto* factory = Singleton<Factory<Layer>>::Instance();
   // create one layer per node
   for (Node* node : graph->nodes()) {
-    auto layer = factory->Create(static_cast<LayerProto*>(node->proto)->type());
+    auto proto_ptr =  static_cast<LayerProto*>(node->proto);
+    auto layer = Layer::Create(*proto_ptr);
     layers_.push_back(layer);
     name2layer_[node->name] = layer;
   }
@@ -267,7 +267,6 @@ Graph* NeuralNet::CreateGraph(const NetProto& netproto, int npartitions) {
   }
 
   // connect nodes, nodes for ConcateLayer, SliceLayer and SplitLayer are added.
-  auto* factory = Singleton<Factory<Layer>>::Instance();
   for (const auto& layerproto : netproto.layer()) {
     string name = layerproto.name();
     int pdim = layerproto.partition_dim();
@@ -275,7 +274,7 @@ Graph* NeuralNet::CreateGraph(const NetProto& netproto, int npartitions) {
     for (auto srcname : layerproto.srclayers()) {
       const vector<Node*>& srcnodes = name2nodes.at(srcname);
       // TODO(wangwei): consider the type of each connection
-      auto *layer = factory->Create(layerproto.type());
+      Layer *layer = Layer::Create(layerproto);
       ConnectionType connection = layer->src_neuron_connection(0);
       delete layer;
       int src_pdim = name2proto[srcname]->partition_dim();
@@ -314,7 +313,7 @@ Graph* NeuralNet::CreateGraph(const NetProto& netproto, int npartitions) {
   // add nodes for SplitLayer
   vector<Node*> oldnodes = graph->nodes();
   for (Node* node : oldnodes) {
-    auto layer = factory->Create(static_cast<LayerProto*>(node->proto)->type());
+    auto layer = Layer::Create(*static_cast<LayerProto*>(node->proto));
     if (node->dstnodes.size() > 1
         && layer->dst_layer_connection() == kOneToOne) {
       vector<Node*> dstnodes = node->dstnodes;

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/proto/common.proto
----------------------------------------------------------------------
diff --git a/src/proto/common.proto b/src/proto/common.proto
index 8927f2b..671d6ad 100644
--- a/src/proto/common.proto
+++ b/src/proto/common.proto
@@ -72,8 +72,9 @@ message Record {
     kSingleLabelImage = 0;
   }
   optional Type type = 1 [default = kSingleLabelImage];
+  optional string user_type = 2;
   // configuration for
-  optional SingleLabelImageRecord image = 2;
+  optional SingleLabelImageRecord image = 5;
 }
 
 message SingleLabelImageRecord {

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/proto/job.proto
----------------------------------------------------------------------
diff --git a/src/proto/job.proto b/src/proto/job.proto
index fafd8e6..80c8b65 100644
--- a/src/proto/job.proto
+++ b/src/proto/job.proto
@@ -22,7 +22,9 @@ message JobProto {
   // neural net consists of a set of connected layers
   required NetProto neuralnet = 3;
   // algorithms calculating gradients for one mini-batch/iteration
-  required TrainOneBatchAlg alg = 5;
+  optional TrainOneBatchAlg alg = 5 [default = kUserAlg];
+  // user-defined algorithm
+  optional string user_alg = 6;
   // configuration of SGD updater, including learning rate, etc.
   required UpdaterProto updater = 7;
   // cluster topology conf
@@ -77,6 +79,8 @@ message JobProto {
   optional int32 step = 91 [default = 0];
   // job id allocated by zookeeper
   optional int32 id = 92 [default = -1];
+
+  extensions 101 to 200;
 }
 
 // Protos used by JobProto
@@ -89,13 +93,18 @@ message NetProto {
 }
 
 message UpdaterProto {
-  // updater type
-  required UpdaterType type = 1 [default = kSGD];
+  // built-in updater type
+  optional UpdaterType type = 1 [default = kUserUpdater];
+  // user-defined updater type
+  optional string user_type = 2;
+
   // configuration for RMSProp algorithm
-  optional RMSPropProto rmsprop_conf = 50;
+  optional RMSPropProto rmsprop_conf = 3;
 
-  // change method for learning rate
-  required ChangeMethod lr_change = 2 [default = kFixed];
+  // built-in change method for learning rate
+  optional ChangeMethod lr_change = 10 [default = kUserChange];
+  // user-defined change method
+  optional string user_lr_change = 11;
 
   optional FixedStepProto fixedstep_conf = 40;
   optional StepProto step_conf = 41;
@@ -109,6 +118,8 @@ message UpdaterProto {
   optional float base_lr = 34 [default = 0];
   // used to avoid divide by 0, i.e. x/(y+delta)
   optional float delta = 35 [default = 0.00000001];
+
+  extensions 101 to 200;
 }
 
 message ClusterProto {
@@ -153,8 +164,11 @@ message LayerProto {
   // some layers, like data layers for loading test data, are not used by the
   // training phase; they should be removed by setting the exclude field.
   repeated Phase exclude = 15;
-  // the layer type
-  required LayerType type = 20;
+  // type of built-in layer
+  optional LayerType type = 20 [default = kUserLayer];
+  // type of user-defined layer
+  optional string user_type = 21;
+
   // proto for the specific layer
   // configuration for convolution layer
   optional ConvolutionProto convolution_conf = 30;
@@ -197,12 +211,9 @@ message LayerProto {
   optional int32 partition_dim = 60 [default = -1];
   // names of parameters shared from other layers
   optional int32 partition_id = 90 [default = 0];
-}
-
 
-// ---------------------------
-// protos for different layers
-// ---------------------------
+  extensions 101 to 200;
+}
 
 // weight matrix should be defined before bias vector
 // TODO(wangwei): separate conf for diff init method
@@ -210,15 +221,22 @@ message ParamProto {
   // used for identifying the same params from diff models and displaying debug info
   optional string name =  1 [default = ""];
   optional InitMethod init_method = 2 [default = kGaussian];
-  optional int32 type = 3 [default = 0];
+  // for built-in Param
+  optional ParamType type = 3 [default = kParam];
+  // for user-defined Param
+  optional string user_type = 4;
   // constant init
   optional float value = 5 [default = 1];
   // for uniform sampling
-  optional float low = 6 [default = -1];
-  optional float high = 7 [default = 1];
+  optional UniformProto uniform_conf = 6;
+  optional float low = 7 [default = -1];
+  optional float high = 8 [default = 1];
+
   // for gaussian sampling
-  optional float mean = 8 [default = 0];
-  optional float std = 9 [default = 1];
+  optional GaussianProto gaussian_conf = 9;
+  optional float mean = 10 [default = 0];
+  optional float std = 11 [default = 1];
+
   // multiplied on the global learning rate.
   optional float lr_scale = 15 [default = 1];
   // multiplied on the global weight decay.
@@ -235,6 +253,8 @@ message ParamProto {
   optional int32 partition_dim = 92;
   // usually, the program will infer the param shape
   repeated int32 shape = 93;
+
+  extensions 101 to 200;
 }
 
 // ---------------------------
@@ -353,14 +373,6 @@ message LRNProto {
   optional float alpha = 31 [default = 1.0];
   // exponential number
   optional float beta = 32 [default = 0.75];
-  enum NormRegion {
-    // across channels, e.g., r,g,b
-    ACROSS_CHANNELS = 0;
-    // within channel, e.g., r, g and b are concatenated into one channel
-    WITHIN_CHANNEL = 1;
-  }
-  // normalization objective
-  optional NormRegion norm_region = 33 [default = ACROSS_CHANNELS];
   // offset
   optional float knorm = 34 [default = 1.0];
 }
@@ -431,6 +443,15 @@ message InverseProto {
   // lr = base_lr*(1+gamma*step)^(-pow)
   required float pow = 2 [default = 0];
 }
+message UniformProto {
+  optional float low = 1 [default = -1];
+  optional float high = 2 [default = 1];
+}
+message GaussianProto {
+  optional float mean = 1 [default = 0];
+  optional float std = 2 [default = 1];
+}
+
 
 // --------------
 // All Enum Types
@@ -444,6 +465,8 @@ enum ChangeMethod {
   kLinear = 4;
   kStep = 5;
   kFixedStep = 6;
+  // For user-defined change methods
+  kUserChange = 100;
 }
 
 enum InitMethod {
@@ -467,6 +490,9 @@ enum InitMethod {
   // [-4a, +4a], put the scale factor to value field.
   // <a href="http://deeplearning.net/tutorial/mlp.html"> Theano MLP</a>
   kUniformSqrtFanInOut = 6;
+
+  // For user-defined init methods
+  kUserInit = 101;
 }
 
 enum LayerType {
@@ -501,6 +527,9 @@ enum LayerType {
   kBridgeSrc = 15;
   kSlice = 12;
   kSplit = 13;
+
+  // Indicates a user-defined layer. Users should configure user_type
+  kUserLayer = 102;
 }
 
 enum PartitionType {
@@ -522,11 +551,20 @@ enum Phase {
   kLoss = 7;
 }
 
+enum ParamType {
+  // built-in Param
+  kParam = 0;
+  // user-defined Param
+  kUser = 103;
+}
+
 enum TrainOneBatchAlg {
   // Back-propagation algorithm for feed-forward models, e.g., CNN and RNN
   kBP = 1;
   // Contrastive Divergence algorithm for RBM, DBM, etc.
   kCD = 2;
+  // For user-defined algorithms.
+  kUserAlg = 104;
 }
 
 enum UpdaterType {
@@ -538,4 +576,6 @@ enum UpdaterType {
   kRMSProp = 3;
   // Nesterov first optimal gradient method
   kNesterov = 4;
+  // For user-defined updaters
+  kUserUpdater = 105;
 }
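
The new "extensions 101 to 200;" ranges let users attach their own
configuration messages without editing job.proto. As a hypothetical example
(FooLayerProto and foo_conf are made-up names; package/import details depend
on the user's setup):

    // user.proto -- proto2 sketch of a user-defined layer configuration
    import "job.proto";

    message FooLayerProto {
      optional int32 num_output = 1;
    }

    extend singa.LayerProto {
      // any tag in the reserved range 101 to 200 works
      optional FooLayerProto foo_conf = 101;
    }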

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/trainer/server.cc
----------------------------------------------------------------------
diff --git a/src/trainer/server.cc b/src/trainer/server.cc
index e0eb821..a8483de 100644
--- a/src/trainer/server.cc
+++ b/src/trainer/server.cc
@@ -20,7 +20,7 @@ Server::Server(int thread_id,int group_id, int server_id):
 void Server::Setup(const UpdaterProto& proto,
     std::unordered_map<int, ParamEntry*>* shard,
     const vector<int>& slice2group) {
-  updater_ = Singleton<Factory<Updater>>::Instance()->Create(proto.type());
+  updater_ = Updater::Create(proto);
   updater_->Init(proto);
   shard_ = shard;
   slice2group_ = slice2group;

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/trainer/trainer.cc
----------------------------------------------------------------------
diff --git a/src/trainer/trainer.cc b/src/trainer/trainer.cc
index fe90bc4..1d78c37 100644
--- a/src/trainer/trainer.cc
+++ b/src/trainer/trainer.cc
@@ -172,10 +172,9 @@ vector<Worker*> Trainer::CreateWorkers(int nthreads, const JobProto& job) {
     wstart = 0;
     wend = grp_size;
   }
-  auto factory = Singleton<Factory<singa::Worker>>::Instance();
   for (int gid = gstart; gid < gend; gid++) {
     for (int wid = wstart; wid < wend; wid++) {
-      Worker* worker = factory->Create(job.alg());
+      auto *worker = Worker::Create(job);
       worker->Init(nthreads++,gid, wid);
       workers.push_back(worker);
     }

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/trainer/worker.cc
----------------------------------------------------------------------
diff --git a/src/trainer/worker.cc b/src/trainer/worker.cc
index 3b1617d..25fea7c 100644
--- a/src/trainer/worker.cc
+++ b/src/trainer/worker.cc
@@ -10,6 +10,15 @@
 namespace singa {
 using std::thread;
 
+Worker* Worker::Create(const JobProto& proto) {
+  auto factory = Singleton<Factory<singa::Worker>>::Instance();
+  Worker* worker = nullptr;
+  if (proto.has_user_alg())
+    worker = factory->Create(proto.user_alg());
+  else
+    worker = factory->Create(proto.alg());
+  return worker;
+}
 void Worker::Init(int thread_id, int grp_id, int id) {
   thread_id_ = thread_id;
   grp_id_ = grp_id;
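
Worker::Create applies the same dispatch to JobProto: if user_alg is set, the
string key is used; otherwise the built-in alg enum is. Registering a
user-defined TrainOneBatch algorithm would then look roughly like this
(FooWorker and "foo_alg" are hypothetical):

    // assuming FooWorker is a concrete Worker subclass defined by the user;
    // setting user_alg: "foo_alg" in job.conf would then select it
    driver.RegisterWorker<FooWorker, std::string>("foo_alg");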

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/utils/param.cc
----------------------------------------------------------------------
diff --git a/src/utils/param.cc b/src/utils/param.cc
index 06c16e9..e658631 100644
--- a/src/utils/param.cc
+++ b/src/utils/param.cc
@@ -6,11 +6,22 @@
 #include "proto/job.pb.h"
 #include "mshadow/tensor.h"
 #include "utils/singleton.h"
+#include "utils/factory.h"
 namespace singa {
 using namespace mshadow;
 using std::vector;
 using std::string;
 
+Param* Param::Create(const ParamProto& proto) {
+  Factory<Param>* factory=Singleton<Factory<Param>>::Instance();
+  Param* p = nullptr;
+  if (proto.has_user_type())
+    p = factory->Create(proto.user_type());
+  else
+    p = factory->Create(proto.type());
+  return p;
+}
+
 Param::Param():local_version_(-1), slice_start_(0), num_slices_(0),
   num_pending_requests_(0), data_(nullptr) {
 }
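
Param::Create mirrors the other factories, dispatching on ParamProto.user_type
when it is set. A user-defined Param is registered the same way (FooParam and
"foo_param" are hypothetical):

    // assuming FooParam subclasses Param; a param block configured with
    // user_type: "foo_param" would then be created as FooParam
    driver.RegisterParam<FooParam, std::string>("foo_param");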

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/97141e2e/src/utils/updater.cc
----------------------------------------------------------------------
diff --git a/src/utils/updater.cc b/src/utils/updater.cc
index 7bca6dc..7d80844 100644
--- a/src/utils/updater.cc
+++ b/src/utils/updater.cc
@@ -2,6 +2,8 @@
 #include "utils/updater.h"
 #include "mshadow/tensor.h"
 #include "mshadow/cxxnet_op.h"
+#include "utils/singleton.h"
+#include "utils/factory.h"
 #include "proto/job.pb.h"
 namespace  singa {
 
@@ -9,6 +11,15 @@ using namespace mshadow;
 using namespace mshadow::expr;
 
 
+Updater* Updater::Create(const UpdaterProto& proto) {
+  auto factory = Singleton<Factory<Updater>>::Instance();
+  Updater* updater = nullptr;
+  if (proto.has_user_type())
+    updater = factory->Create(proto.user_type());
+  else
+    updater = factory->Create(proto.type());
+  return updater;
+}
 float Updater::GetLearningRate(int step) {
   float ret = 0., r = 0., base = proto_.base_lr();
   int freq = 0;