Posted to commits@singa.apache.org by ka...@apache.org on 2016/11/24 07:18:05 UTC

[2/2] incubator-singa git commit: SINGA-261 Add version ID into the checkpoint files

SINGA-261 Add version ID into the checkpoint files

Add singa.__version__ and singa.layer.cudnn_version to expose the SINGA and cuDNN versions in PySINGA
Fix a bug in the PySINGA RNN layer caused by the SWIG rename of Forward and Backward (to ForwardWithMultInputs and BackwardWithMultInputs)
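
With this change, both versions can be checked from Python. A minimal usage sketch (assumes a built PySINGA install; the printed values depend on your build):

    # version of the SINGA library wrapped by PySINGA
    import singa
    print(singa.__version__)

    # cuDNN version SINGA was compiled against; 0 when built without cuDNN
    from singa import layer
    print(layer.cudnn_version)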


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/e43ea694
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/e43ea694
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/e43ea694

Branch: refs/heads/master
Commit: e43ea694521d64bac276d9e3e634edd5d6a04be9
Parents: 0bf1475
Author: Wei Wang <wa...@gmail.com>
Authored: Wed Nov 23 14:42:04 2016 +0800
Committer: Wei Wang <wa...@gmail.com>
Committed: Wed Nov 23 14:42:04 2016 +0800

----------------------------------------------------------------------
 cmake/Templates/singa_config.h.in |  2 +-
 cmake/Thirdparty/FindCUDNN.cmake  |  2 +-
 python/singa/__init__.py          |  8 +++++---
 python/singa/layer.py             | 21 +++++++++++++++++++--
 src/api/model_layer.i             | 17 +++++++++++------
 5 files changed, 37 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/e43ea694/cmake/Templates/singa_config.h.in
----------------------------------------------------------------------
diff --git a/cmake/Templates/singa_config.h.in b/cmake/Templates/singa_config.h.in
index baa625c..181c9fd 100644
--- a/cmake/Templates/singa_config.h.in
+++ b/cmake/Templates/singa_config.h.in
@@ -48,4 +48,4 @@
 #cmakedefine CUDNN_MAJOR_VERSION @CUDNN_MAJOR_VERSION@
 #cmakedefine CUDNN_MINOR_VERSION @CUDNN_MINOR_VERSION@
 #cmakedefine CUDNN_PATCH_VERSION @CUDNN_PATCH_VERSION@
-#cmakedefine CUDNN_VERSION @CUDNN_VERSION@
+// #cmakedefine CUDNN_VERSION @CUDNN_VERSION@

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/e43ea694/cmake/Thirdparty/FindCUDNN.cmake
----------------------------------------------------------------------
diff --git a/cmake/Thirdparty/FindCUDNN.cmake b/cmake/Thirdparty/FindCUDNN.cmake
index d48fc0e..c287ace 100644
--- a/cmake/Thirdparty/FindCUDNN.cmake
+++ b/cmake/Thirdparty/FindCUDNN.cmake
@@ -40,7 +40,7 @@ IF(CUDNN_FOUND)
     STRING(REGEX REPLACE "define CUDNN_PATCHLEVEL * +([0-9]+)" "\\1"
         CUDNN_PATCH_VERSION "${CUDNN_PATCH_VERSION}")
 
-    IF(NOT CUDNN_VERSION_MAJOR)
+    IF(NOT CUDNN_MAJOR_VERSION)
         SET(CUDNN_VERSION "???")
     ELSE()
       MATH(EXPR CUDNN_VERSION "${CUDNN_MAJOR_VERSION} * 1000 + ${CUDNN_MINOR_VERSION} * 100 + ${CUDNN_PATCH_VERSION}")
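
For reference, the MATH expression above packs the three version components into a single integer, so e.g. cuDNN 5.1.5 encodes as 5105. The same encoding in Python (illustrative only, not part of this commit):

    def encode_cudnn_version(major, minor, patch):
        # mirrors the CMake expression: major * 1000 + minor * 100 + patch
        return major * 1000 + minor * 100 + patch

    assert encode_cudnn_version(5, 1, 5) == 5105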

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/e43ea694/python/singa/__init__.py
----------------------------------------------------------------------
diff --git a/python/singa/__init__.py b/python/singa/__init__.py
index c81c6ef..3f24896 100644
--- a/python/singa/__init__.py
+++ b/python/singa/__init__.py
@@ -6,14 +6,16 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #     http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# 
+#
 
+from . import singa_wrap
 
+__version__ = singa_wrap.SINGA_VERSION

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/e43ea694/python/singa/layer.py
----------------------------------------------------------------------
diff --git a/python/singa/layer.py b/python/singa/layer.py
index f0024c4..0244454 100644
--- a/python/singa/layer.py
+++ b/python/singa/layer.py
@@ -63,6 +63,11 @@ engine is case insensitive. Each python layer would create the correct specific
 layer using the engine attribute.
 '''
 
+if singa_wrap.USE_CUDNN:
+    cudnn_version = singa_wrap.CUDNN_VERSION
+else:
+    cudnn_version = 0
+
 
 class Layer(object):
     '''Base Python layer class.
@@ -219,6 +224,12 @@ class Layer(object):
             <dx, <dp1, dp2..>>, dx is a (set of) tensor(s) for the gradient of x
             , dpi is the gradient of the i-th parameter
         '''
+        if type(flag) is bool:
+            if flag:
+                flag = model_pb2.kTrain
+            else:
+                flag = model_pb2.kEval
+
         if type(dy) == list:
             dys = [t.singa_tensor for t in dy]
             ret = self.layer.BackwardWithMultInputs(flag, dys)
@@ -918,7 +929,7 @@ class RNN(Layer):
                 flag = model_pb2.kTrain
             else:
                 flag = model_pb2.kEval
-        y = self.layer.Forward(flag, tensors)
+        y = self.layer.ForwardWithMultInputs(flag, tensors)
         return tensor.from_raw_tensors(y)
 
     def backward(self, flag, grad):
@@ -942,11 +953,17 @@ class RNN(Layer):
                 hidden state. dcx is the gradient for the initial cell state,
                 which is valid only for lstm.
         '''
+        if type(flag) is bool:
+            if flag:
+                flag = model_pb2.kTrain
+            else:
+                flag = model_pb2.kEval
+
         tensors = []
         for t in grad:
             assert isinstance(t, tensor.Tensor), 'grad must be py Tensor'
             tensors.append(t.singa_tensor)
-        ret = self.layer.Backward(flag, tensors)
+        ret = self.layer.BackwardWithMultInputs(flag, tensors)
         return tensor.from_raw_tensors(ret[0]), tensor.from_raw_tensors(ret[1])
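
With this fix, RNN.forward and RNN.backward call the ForwardWithMultInputs/BackwardWithMultInputs wrappers generated by SWIG, and backward now accepts a plain bool flag like forward does. A hedged usage sketch (the rnn object and the tensor lists xs and dys are hypothetical placeholders):

    # xs: list of input tensors; dys: list of gradient tensors
    ys = rnn.forward(True, xs)              # True is mapped to model_pb2.kTrain
    dxs, dparams = rnn.backward(True, dys)  # gradients of inputs and parameters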
 
 

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/e43ea694/src/api/model_layer.i
----------------------------------------------------------------------
diff --git a/src/api/model_layer.i b/src/api/model_layer.i
index e5df285..92919fd 100644
--- a/src/api/model_layer.i
+++ b/src/api/model_layer.i
@@ -74,15 +74,17 @@ class Layer {
   virtual const std::vector<size_t> GetOutputSampleShape(int k);
   virtual void ToDevice(std::shared_ptr<Device> device);
   virtual void AsType(DataType dtype);
+
   virtual const Tensor Forward(int flag, const Tensor& input);
   %rename(ForwardWithMultInputs) Forward(int flag, const std::vector<Tensor>&);
   virtual const std::vector<Tensor> Forward(
       int flag, const std::vector<Tensor>& inputs);
+
   virtual const std::pair<Tensor, std::vector<Tensor>> Backward(
       int flag, const Tensor& grad);
-  %rename(BackwardWithMultInputs) Backward(int, const vector<Tensor>&);
+  %rename(BackwardWithMultInputs) Backward(int, const std::vector<Tensor>&);
   virtual const std::pair<std::vector<Tensor>, std::vector<Tensor>>
-  Backward(int flag, const vector<Tensor>& grads);
+  Backward(int flag, const std::vector<Tensor>& grads);
 };
 
 std::shared_ptr<Layer> CreateLayer(const std::string& type);
@@ -95,11 +97,14 @@ class RNN : public Layer {
 #if CUDNN_VERSION >= 5005
 class CudnnRNN : public RNN {
  public:
- // note: Must use std::vector instead of vector.
-  const std::vector<Tensor> Forward(int flag,
-                                    const std::vector<Tensor>& inputs) override;
+  // note: Must use std::vector instead of vector.
+  %rename(ForwardWithMultInputs) Forward(int flag, const std::vector<Tensor>&);
+  const std::vector<Tensor> Forward(
+      int flag, const std::vector<Tensor>& inputs);
+  %rename(BackwardWithMultInputs) Backward(int, const std::vector<Tensor>&);
   const std::pair<std::vector<Tensor>, std::vector<Tensor>>
-  Backward(int flag, const std::vector<Tensor>& grads) override;
+  Backward(int flag, const std::vector<Tensor>& grads);
+
   void ToDevice(std::shared_ptr<Device> device) override;
   const std::vector<Tensor> param_values() override;
   const std::vector<size_t> GetOutputSampleShape() const override;
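
The %rename directives are why the Python fix above calls ForwardWithMultInputs and BackwardWithMultInputs: the overloads taking std::vector<Tensor> are exposed to Python under these distinct names, while the single-tensor overloads keep Forward and Backward. A sketch of the resulting Python-facing API (the layer object, flag, and tensors are illustrative):

    y = layer.Forward(flag, x)                  # single-input overload
    ys = layer.ForwardWithMultInputs(flag, xs)  # renamed multi-input overload
    ret = layer.BackwardWithMultInputs(flag, dys)  # ret[0]: input grads, ret[1]: param grads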