You are viewing a plain text version of this content. The canonical link was provided as a hyperlink in the original HTML version of this message and is not available in this plain-text rendering.
Posted to commits@mxnet.apache.org by ns...@apache.org on 2018/11/27 17:48:40 UTC

[incubator-mxnet] branch master updated: [Example] fix cpp example inception-bn and training acc issue (#13284)

This is an automated email from the ASF dual-hosted git repository.

nswamy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new ab71205  [Example] fix cpp example inception-bn and training acc issue (#13284)
ab71205 is described below

commit ab712056b6f84ecb0f645e37ea8699df38965255
Author: Lai Wei <ro...@gmail.com>
AuthorDate: Tue Nov 27 09:48:25 2018 -0800

    [Example] fix cpp example inception-bn and training acc issue (#13284)
    
    * fix inception-bn and training acc issue
    
    * add parameter initialization, fix lint
    
    * fix comparison
    
    * change optimizer to sgd
    
    * update sgd and update model name
    
    * add inception_bn in jenkins build
    
    * make max epoch an argument
    
    * remove inception_bn test
    
    * trigger ci
    
    * remove ci test
    
    * trigger ci
---
 cpp-package/example/alexnet.cpp               |  2 +-
 cpp-package/example/charRNN.cpp               |  2 +-
 cpp-package/example/googlenet.cpp             |  2 +-
 cpp-package/example/inception_bn.cpp          | 26 +++++++++++++++++++-------
 cpp-package/example/lenet_with_mxdataiter.cpp |  2 +-
 cpp-package/example/resnet.cpp                | 19 +++++++++++++++----
 cpp-package/include/mxnet-cpp/symbol.hpp      |  4 ++--
 7 files changed, 40 insertions(+), 17 deletions(-)

diff --git a/cpp-package/example/alexnet.cpp b/cpp-package/example/alexnet.cpp
index a5f4952..7564d43 100644
--- a/cpp-package/example/alexnet.cpp
+++ b/cpp-package/example/alexnet.cpp
@@ -249,7 +249,7 @@ int main(int argc, char const *argv[]) {
   auto val_iter = MXDataIter("MNISTIter");
   setDataIter(&val_iter, "Label", data_files, batch_size);
 
-  Optimizer* opt = OptimizerRegistry::Find("ccsgd");
+  Optimizer* opt = OptimizerRegistry::Find("sgd");
   opt->SetParam("momentum", 0.9)
      ->SetParam("rescale_grad", 1.0 / batch_size)
      ->SetParam("clip_gradient", 10)
diff --git a/cpp-package/example/charRNN.cpp b/cpp-package/example/charRNN.cpp
index ad564f6..54b8eea 100644
--- a/cpp-package/example/charRNN.cpp
+++ b/cpp-package/example/charRNN.cpp
@@ -465,7 +465,7 @@ void train(const std::string file, int batch_size, int max_epoch, int start_epoc
 
   mx_float learning_rate = 0.0002;
   mx_float weight_decay = 0.000002;
-  Optimizer* opt = OptimizerRegistry::Find("ccsgd");
+  Optimizer* opt = OptimizerRegistry::Find("sgd");
   opt->SetParam("lr", learning_rate)
      ->SetParam("wd", weight_decay);
 //  opt->SetParam("momentum", 0.9)->SetParam("rescale_grad", 1.0 / batch_size)
diff --git a/cpp-package/example/googlenet.cpp b/cpp-package/example/googlenet.cpp
index ad9212c..4bd3be2 100644
--- a/cpp-package/example/googlenet.cpp
+++ b/cpp-package/example/googlenet.cpp
@@ -144,7 +144,7 @@ int main(int argc, char const *argv[]) {
   auto val_iter = MXDataIter("MNISTIter");
   setDataIter(&val_iter, "Label", data_files, batch_size);
 
-  Optimizer* opt = OptimizerRegistry::Find("ccsgd");
+  Optimizer* opt = OptimizerRegistry::Find("sgd");
   opt->SetParam("momentum", 0.9)
      ->SetParam("rescale_grad", 1.0 / batch_size)
      ->SetParam("clip_gradient", 10)
diff --git a/cpp-package/example/inception_bn.cpp b/cpp-package/example/inception_bn.cpp
index c499df7..5b444e4 100644
--- a/cpp-package/example/inception_bn.cpp
+++ b/cpp-package/example/inception_bn.cpp
@@ -91,7 +91,8 @@ Symbol InceptionFactoryB(Symbol data, int num_3x3red, int num_3x3,
                         Shape(1, 1), name + "_double_3x3_1");
   Symbol pooling = Pooling("max_pool_" + name + "_pool", data,
                            Shape(3, 3), PoolingPoolType::kMax,
-                           false, false, PoolingPoolingConvention::kValid, Shape(2, 2));
+                           false, false, PoolingPoolingConvention::kValid,
+                           Shape(2, 2), Shape(1, 1));
   std::vector<Symbol> lst;
   lst.push_back(c3x3);
   lst.push_back(cd3x3);
@@ -143,8 +144,8 @@ Symbol InceptionSymbol(int num_classes) {
 
 int main(int argc, char const *argv[]) {
   int batch_size = 40;
-  int max_epoch = 100;
-  float learning_rate = 1e-4;
+  int max_epoch = argc > 1 ? strtol(argv[1], NULL, 10) : 100;
+  float learning_rate = 1e-2;
   float weight_decay = 1e-4;
 
   auto ctx = Context::gpu();
@@ -172,7 +173,13 @@ int main(int argc, char const *argv[]) {
   auto val_iter = MXDataIter("MNISTIter");
   setDataIter(&val_iter, "Label", data_files, batch_size);
 
-  Optimizer* opt = OptimizerRegistry::Find("ccsgd");
+  // initialize parameters
+  Xavier xavier = Xavier(Xavier::gaussian, Xavier::in, 2);
+  for (auto &arg : args_map) {
+    xavier(arg.first, &arg.second);
+  }
+
+  Optimizer* opt = OptimizerRegistry::Find("sgd");
   opt->SetParam("momentum", 0.9)
      ->SetParam("rescale_grad", 1.0 / batch_size)
      ->SetParam("clip_gradient", 10)
@@ -182,9 +189,12 @@ int main(int argc, char const *argv[]) {
   auto *exec = inception_bn_net.SimpleBind(ctx, args_map);
   auto arg_names = inception_bn_net.ListArguments();
 
+  // Create metrics
+  Accuracy train_acc, val_acc;
   for (int iter = 0; iter < max_epoch; ++iter) {
     LG << "Epoch: " << iter;
     train_iter.Reset();
+    train_acc.Reset();
     while (train_iter.Next()) {
       auto data_batch = train_iter.GetDataBatch();
       data_batch.data.CopyTo(&args_map["data"]);
@@ -200,10 +210,11 @@ int main(int argc, char const *argv[]) {
       }
 
       NDArray::WaitAll();
+      train_acc.Update(data_batch.label, exec->outputs[0]);
     }
 
-    Accuracy acu;
     val_iter.Reset();
+    val_acc.Reset();
     while (val_iter.Next()) {
       auto data_batch = val_iter.GetDataBatch();
       data_batch.data.CopyTo(&args_map["data"]);
@@ -211,9 +222,10 @@ int main(int argc, char const *argv[]) {
       NDArray::WaitAll();
       exec->Forward(false);
       NDArray::WaitAll();
-      acu.Update(data_batch.label, exec->outputs[0]);
+      val_acc.Update(data_batch.label, exec->outputs[0]);
     }
-    LG << "Accuracy: " << acu.Get();
+    LG << "Train Accuracy: " << train_acc.Get();
+    LG << "Validation Accuracy: " << val_acc.Get();
   }
   delete exec;
   MXNotifyShutdown();
diff --git a/cpp-package/example/lenet_with_mxdataiter.cpp b/cpp-package/example/lenet_with_mxdataiter.cpp
index 9869356..4df6fbe 100644
--- a/cpp-package/example/lenet_with_mxdataiter.cpp
+++ b/cpp-package/example/lenet_with_mxdataiter.cpp
@@ -102,7 +102,7 @@ int main(int argc, char const *argv[]) {
   auto val_iter = MXDataIter("MNISTIter");
   setDataIter(&val_iter, "Label", data_files, batch_size);
 
-  Optimizer* opt = OptimizerRegistry::Find("ccsgd");
+  Optimizer* opt = OptimizerRegistry::Find("sgd");
   opt->SetParam("momentum", 0.9)
      ->SetParam("rescale_grad", 1.0)
      ->SetParam("clip_gradient", 10)
diff --git a/cpp-package/example/resnet.cpp b/cpp-package/example/resnet.cpp
index bc86c0b..0bb77a1 100644
--- a/cpp-package/example/resnet.cpp
+++ b/cpp-package/example/resnet.cpp
@@ -184,7 +184,13 @@ int main(int argc, char const *argv[]) {
   auto val_iter = MXDataIter("MNISTIter");
   setDataIter(&val_iter, "Label", data_files, batch_size);
 
-  Optimizer* opt = OptimizerRegistry::Find("ccsgd");
+  // initialize parameters
+  Xavier xavier = Xavier(Xavier::gaussian, Xavier::in, 2);
+  for (auto &arg : args_map) {
+    xavier(arg.first, &arg.second);
+  }
+
+  Optimizer* opt = OptimizerRegistry::Find("sgd");
   opt->SetParam("lr", learning_rate)
      ->SetParam("wd", weight_decay)
      ->SetParam("momentum", 0.9)
@@ -194,9 +200,12 @@ int main(int argc, char const *argv[]) {
   auto *exec = resnet.SimpleBind(ctx, args_map);
   auto arg_names = resnet.ListArguments();
 
+  // Create metrics
+  Accuracy train_acc, val_acc;
   for (int iter = 0; iter < max_epoch; ++iter) {
     LG << "Epoch: " << iter;
     train_iter.Reset();
+    train_acc.Reset();
     while (train_iter.Next()) {
       auto data_batch = train_iter.GetDataBatch();
       data_batch.data.CopyTo(&args_map["data"]);
@@ -211,10 +220,11 @@ int main(int argc, char const *argv[]) {
         opt->Update(i, exec->arg_arrays[i], exec->grad_arrays[i]);
       }
       NDArray::WaitAll();
+      train_acc.Update(data_batch.label, exec->outputs[0]);
     }
 
-    Accuracy acu;
     val_iter.Reset();
+    val_acc.Reset();
     while (val_iter.Next()) {
       auto data_batch = val_iter.GetDataBatch();
       data_batch.data.CopyTo(&args_map["data"]);
@@ -222,9 +232,10 @@ int main(int argc, char const *argv[]) {
       NDArray::WaitAll();
       exec->Forward(false);
       NDArray::WaitAll();
-      acu.Update(data_batch.label, exec->outputs[0]);
+      val_acc.Update(data_batch.label, exec->outputs[0]);
     }
-    LG << "Accuracy: " << acu.Get();
+    LG << "Train Accuracy: " << train_acc.Get();
+    LG << "Validation Accuracy: " << val_acc.Get();
   }
   delete exec;
   MXNotifyShutdown();
diff --git a/cpp-package/include/mxnet-cpp/symbol.hpp b/cpp-package/include/mxnet-cpp/symbol.hpp
index b82e060..aed9639 100644
--- a/cpp-package/include/mxnet-cpp/symbol.hpp
+++ b/cpp-package/include/mxnet-cpp/symbol.hpp
@@ -281,8 +281,8 @@ inline void Symbol::InferExecutorArrays(
     auto iter_req = grad_req_type.find(arg_name);
     if (iter_req != grad_req_type.end()) {
       grad_reqs->push_back(iter_req->second);
-    } else if (arg_name.rfind("data") == arg_name.length() - 4
-            || arg_name.rfind("label") == arg_name.length() - 5) {
+    } else if (arg_name.rfind("data") != std::string::npos
+            || arg_name.rfind("label") != std::string::npos) {
       grad_reqs->push_back(OpReqType::kNullOp);
     } else {
       grad_reqs->push_back(OpReqType::kWriteTo);