You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@singa.apache.org by zh...@apache.org on 2017/05/24 12:12:21 UTC

[2/5] incubator-singa git commit: SINGA-312 Rename layer parameters

SINGA-312 Rename layer parameters

Layer parameters are now renamed to PREFIX/SUFFIX, where PREFIX is typically the layer name and SUFFIX is 'weight', 'bias', 'mean', etc.


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/fa4f6314
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/fa4f6314
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/fa4f6314

Branch: refs/heads/master
Commit: fa4f631465ae033e9286b86f102d86554bea03d3
Parents: 0815391
Author: wangwei <wa...@comp.nus.edu.sg>
Authored: Mon May 22 16:33:50 2017 +0800
Committer: wangwei <wa...@comp.nus.edu.sg>
Committed: Mon May 22 16:33:50 2017 +0800

----------------------------------------------------------------------
 CMakeLists.txt              | 14 +++---
 include/singa/core/tensor.h |  5 +++
 python/singa/layer.py       | 18 ++++----
 python/singa/net.py         | 93 +++++++++++++++++++++++++---------------
 python/singa/snapshot.py    |  8 ++--
 test/python/test_net.py     | 24 +++++++++--
 6 files changed, 104 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/fa4f6314/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 7dd1b57..c9e47a3 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -19,10 +19,10 @@
 CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
 
 PROJECT(singa)
-SET(PACKAGE_VERSION "1.1.0")
+SET(PACKAGE_VERSION "1.1.1")
 SET(SINGA_MAJOR_VERSION 1)  # 0 -
 SET(SINGA_MINOR_VERSION 1)  # 0 - 9
-SET(SINGA_PATCH_VERSION 0)  # 0 - 99
+SET(SINGA_PATCH_VERSION 1)  # 0 - 99
 MATH(EXPR SINGA_VERSION "${SINGA_MAJOR_VERSION} * 1000 + ${SINGA_MINOR_VERSION} * 100 + ${SINGA_PATCH_VERSION}")
 
 LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/Thirdparty)
@@ -174,18 +174,18 @@ INSTALL(CODE "execute_process(COMMAND python setup.py install --prefix=${CMAKE_I
 # CPack
 IF(PACKAGE)
 	IF(USE_PYTHON)
-		INSTALL(FILES ${CMAKE_BINARY_DIR}/python/setup.py 
+		INSTALL(FILES ${CMAKE_BINARY_DIR}/python/setup.py
 			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa/python)
 		INSTALL(FILES ${CMAKE_BINARY_DIR}/python/singa/singa_wrap.py
 			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa/python/singa)
 		INSTALL(FILES ${CMAKE_BINARY_DIR}/python/singa/_singa_wrap.so
 			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa/python/singa)
-		INSTALL(DIRECTORY ${PROJECT_SOURCE_DIR}/python 
-			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa)	
+		INSTALL(DIRECTORY ${PROJECT_SOURCE_DIR}/python
+			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa)
 		INSTALL(DIRECTORY ${CMAKE_BINARY_DIR}/python/singa/proto
-			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa/python/singa)	
+			DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/singa/python/singa)
 	ENDIF()
-    
+
 	IF (USE_MODULES)
 		SET(CORE_DEPENDENCIES "libgoogle-glog-dev, libstdc++6, libc6")
 	ELSE()

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/fa4f6314/include/singa/core/tensor.h
----------------------------------------------------------------------
diff --git a/include/singa/core/tensor.h b/include/singa/core/tensor.h
index c89fa83..6621fa0 100644
--- a/include/singa/core/tensor.h
+++ b/include/singa/core/tensor.h
@@ -106,6 +106,11 @@ class Tensor {
 
   bool transpose() const { return transpose_; }
 
+  /// return true if the content of the tensor is initialized
+  bool initailized() const {
+    return block_ != nullptr && block_->initialized();
+  }
+
   /// return number of total elements
   size_t Size() const {
     if (block_ == nullptr) return 0u;

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/fa4f6314/python/singa/layer.py
----------------------------------------------------------------------
diff --git a/python/singa/layer.py b/python/singa/layer.py
index 00b4763..4fe9983 100644
--- a/python/singa/layer.py
+++ b/python/singa/layer.py
@@ -347,7 +347,7 @@ class Conv2D(Layer):
         if W_specs is None:
             W_specs = {'init': 'xavier'}
         if 'name' not in W_specs:
-            W_specs['name'] = name + '_weight'
+            W_specs['name'] = name + '/weight'
         wspecs = _construct_param_specs_from_dict(W_specs)
         self.conf.param.extend([wspecs])
         self.param_specs.append(wspecs)
@@ -355,7 +355,7 @@ class Conv2D(Layer):
             if b_specs is None:
                 b_specs = {'init': 'constant'}
             if 'name' not in b_specs:
-                b_specs['name'] = name + '_bias'
+                b_specs['name'] = name + '/bias'
             bspecs = _construct_param_specs_from_dict(b_specs)
             self.conf.param.extend([bspecs])
             self.param_specs.append(bspecs)
@@ -524,11 +524,11 @@ class BatchNormalization(Layer):
         if gamma_specs is None:
             gamma_specs = {'init': 'Xavier'}
         if 'name' not in beta_specs:
-            beta_specs['name'] = name + '_beta'
+            beta_specs['name'] = name + '/beta'
         if 'name' not in gamma_specs:
-            gamma_specs['name'] = name + '_gamma'
-        mean_specs = {'init': 'constant', 'value': 0, 'name': name + '_mean'}
-        var_specs = {'init': 'constant', 'value': 1, 'name': name + '_var'}
+            gamma_specs['name'] = name + '/gamma'
+        mean_specs = {'init': 'constant', 'value': 0, 'name': name + '/mean'}
+        var_specs = {'init': 'constant', 'value': 1, 'name': name + '/var'}
         self.conf.param.extend([_construct_param_specs_from_dict(gamma_specs)])
         self.conf.param.extend([_construct_param_specs_from_dict(beta_specs)])
         self.conf.param.extend([_construct_param_specs_from_dict(mean_specs)])
@@ -656,7 +656,7 @@ class Dense(Layer):
         if W_specs is None:
             W_specs = {'init': 'xavier'}
         if 'name' not in W_specs:
-            W_specs['name'] = name + '_weight'
+            W_specs['name'] = name + '/weight'
         wspecs = _construct_param_specs_from_dict(W_specs)
         self.conf.param.extend([wspecs])
         self.param_specs.append(wspecs)
@@ -664,7 +664,7 @@ class Dense(Layer):
             if b_specs is None:
                 b_specs = {'init': 'constant', 'value': 0}
             if 'name' not in b_specs:
-                b_specs['name'] = name + '_bias'
+                b_specs['name'] = name + '/bias'
             bspecs = _construct_param_specs_from_dict(b_specs)
             self.conf.param.extend([bspecs])
             self.param_specs.append(bspecs)
@@ -1020,7 +1020,7 @@ class RNN(Layer):
         # currently only has rnn layer implemented using cudnn
         _check_engine(engine, ['cudnn'])
         if param_specs is None:
-            param_specs = {'name': name + '-weight',
+            param_specs = {'name': name + '/weight',
                            'init': 'uniform', 'low': 0, 'high': 1}
         self.conf.param.extend([_construct_param_specs_from_dict(param_specs)])
         self.param_specs.append(_construct_param_specs_from_dict(param_specs))

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/fa4f6314/python/singa/net.py
----------------------------------------------------------------------
diff --git a/python/singa/net.py b/python/singa/net.py
index 2be8d95..0226864 100644
--- a/python/singa/net.py
+++ b/python/singa/net.py
@@ -55,11 +55,14 @@ Example usages::
 """
 
 from .proto.model_pb2 import kTrain, kEval
+from __init__ import __version__
 import tensor
 import layer
 import snapshot
 import cPickle as pickle
 
+import os
+
 '''For display training information, e.g L1 value of layer data'''
 verbose = False
 
@@ -134,7 +137,7 @@ class FeedForwardNet(object):
         else:
             self.out_sample_shape_of_layer[lyr.name] = [out_shape]
         self.layers.append(lyr)
-        print lyr.name, out_shape
+        print(lyr.name, out_shape)
         return lyr
 
     def param_values(self):
@@ -181,7 +184,7 @@ class FeedForwardNet(object):
             m = self.metric.evaluate(out, y)
             return self.backward(), (l.l1(), m)
         else:
-            return self.backward(), (l.l1(),None)
+            return self.backward(), (l.l1(), None)
 
     def evaluate(self, x, y):
         '''Evaluate the loss and metric of the given data.
@@ -303,10 +306,10 @@ class FeedForwardNet(object):
                 disp_src = '+'.join([src.name for src in srcs])
                 disp_src += '-->' + cur.name
                 if type(out) is list:
-                    print '%s: %s' % (disp_src,
-                                      ' '.join([str(o.l1()) for o in out]))
+                    print('%s: %s' % (disp_src,
+                                      ' '.join([str(o.l1()) for o in out])))
                 else:
-                    print '%s: %f' % (disp_src, out.l1())
+                    print('%s: %f' % (disp_src, out.l1()))
             output_of_layer[cur.name] = out
             if cur.name in output:
                 ret[cur.name] = out
@@ -360,10 +363,10 @@ class FeedForwardNet(object):
                         [dst.name for dst in self.dst_of_layer[cur.name]])
                 disp_src += '-->' + cur.name
                 if type(outs) is list:
-                    print '%s: %s' % (disp_src,
-                                      ' '.join([str(o.l1()) for o in outs]))
+                    print('%s: %s' % (disp_src,
+                                      ' '.join([str(o.l1()) for o in outs])))
                 else:
-                    print '%s: %f' % (disp_src, outs.l1())
+                    print('%s: %f' % (disp_src, outs.l1()))
             if type(outs) is list:
                 output_of_layer[cur.name] = outs[::-1]
             else:
@@ -388,12 +391,18 @@ class FeedForwardNet(object):
         '''
         if use_pickle:
             params = {}
+            # since SINGA>=1.1.1
+            params['SINGA_VERSION'] = __version__
             for (name, val) in zip(self.param_names(), self.param_values()):
                 val.to_host()
                 params[name] = tensor.to_numpy(val)
-                with open(f, 'wb') as fd:
-                    pickle.dump(params, fd)
+            if not f.endswith('.pickle'):
+                f = f + '.pickle'
+            with open(f, 'wb') as fd:
+                pickle.dump(params, fd)
         else:
+            if f.endswith('.bin'):
+                f = f[0:-4]
             sp = snapshot.Snapshot(f, True, buffer_size)
             for (name, val) in zip(self.param_names(), self.param_values()):
                 val.to_host()
@@ -404,35 +413,49 @@ class FeedForwardNet(object):
 
         Please refer to the argument description in save().
         '''
+        version = 0
+
+        def get_name(name):
+            if version < 1011:
+                idx = name.rfind('/')
+                assert idx > 0, '/ must be in the parameter name'
+                name = name[:idx-1] + '_' + name[idx:]
+            return name
+
         if use_pickle:
-            print 'NOTE: If your model was saved using Snapshot, '\
-                    'then set use_pickle=False for loading it'
+            print('NOTE: If your model was saved using Snapshot, '
+                  'then set use_pickle=False for loading it')
+            if not os.path.exists(f):
+                # guess the correct path
+                if f.endswith('.pickle'):
+                    f = f[0:-7]
+                else:
+                    f = f + '.pickle'
+            assert os.path.exists(f), 'file not exists %s w/o .pickle' % f
             with open(f, 'rb') as fd:
                 params = pickle.load(fd)
-                for name, val in zip(self.param_names(), self.param_values()):
-                    if name not in params:
-                        print 'Param: %s missing in the checkpoint file' % name
-                        continue
-                    try:
-                        val.copy_from_numpy(params[name])
-                    except AssertionError as err:
-                        print 'Error from copying values for param: %s' % name
-                        print 'shape of param vs checkpoint', \
-                                val.shape, params[name].shape
-                        raise err
         else:
-            print 'NOTE: If your model was saved using pickle, '\
-                    'then set use_pickle=True for loading it'
+            print('NOTE: If your model was saved using pickle, '
+                  'then set use_pickle=True for loading it')
+            if f.endswith('.bin'):
+                f = f[0:-4]
             sp = snapshot.Snapshot(f, False, buffer_size)
             params = sp.read()
-            for (name, val) in zip(self.param_names(), self.param_values()):
-                if name not in params:
-                    print 'Param: %s missing in the checkpoint file' % name
-                    continue
-                try:
+        if 'SINGA_VERSION' in params:
+            # for SINGA >= 1.1.1
+            version = params['SINGA_VERSION']
+        for name, val in zip(self.param_names(), self.param_values()):
+            name = get_name(name)
+            if name not in params:
+                print('Param: %s missing in the checkpoint file' % name)
+                continue
+            try:
+                if isinstance(params[name], tensor.Tensor):
                     val.copy_data(params[name])
-                except AssertionError as err:
-                    print 'Error from copying values for param: %s' % name
-                    print 'shape of param vs checkpoint', \
-                            val.shape, params[name].shape
-                    raise err
+                else:
+                    val.copy_from_numpy(params[name])
+            except AssertionError as err:
+                print('Error from copying values for param: %s' % name)
+                print('shape of param vs checkpoint',
+                      val.shape, params[name].shape)
+                raise err

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/fa4f6314/python/singa/snapshot.py
----------------------------------------------------------------------
diff --git a/python/singa/snapshot.py b/python/singa/snapshot.py
index bd8918e..3e1298f 100644
--- a/python/singa/snapshot.py
+++ b/python/singa/snapshot.py
@@ -33,11 +33,12 @@ Example usages::
 from . import singa_wrap as singa
 import tensor
 
+
 class Snapshot(object):
     ''' Class and member functions for singa::Snapshot.
 
     '''
-    def __init__(self, f, mode, buffer_size = 10):
+    def __init__(self, f, mode, buffer_size=10):
         '''Snapshot constructor given file name and R/W mode.
 
         Args:
@@ -55,6 +56,7 @@ class Snapshot(object):
             param_val (Tensor): value tensor of the parameter
         '''
         self.snapshot.Write(str(param_name), param_val.singa_tensor)
+
     def read(self):
         '''Call read method to load all (param_name, param_val)
 
@@ -62,8 +64,8 @@ class Snapshot(object):
             a dict of (parameter name, parameter Tensor)
         '''
         params = {}
-        p = self.snapshot.Read();
+        p = self.snapshot.Read()
         for (param_name, param_val) in p:
-            print param_name
+            # print(param_name)
             params[param_name] = tensor.from_raw_tensor(param_val)
         return params

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/fa4f6314/test/python/test_net.py
----------------------------------------------------------------------
diff --git a/test/python/test_net.py b/test/python/test_net.py
index 53a4f24..50b976c 100644
--- a/test/python/test_net.py
+++ b/test/python/test_net.py
@@ -28,6 +28,7 @@ from singa import loss
 layer.engine = 'singacpp'
 # net.verbose = True
 
+
 class TestFeedForwardNet(unittest.TestCase):
 
     def test_single_input_output(self):
@@ -40,8 +41,9 @@ class TestFeedForwardNet(unittest.TestCase):
         y.set_value(0)
         out, _ = ffn.evaluate(x, y)
         self.assertAlmostEqual(out * 3,
-                - math.log(1.0/(1+math.exp(1))) - math.log(0.5) -math.log(0.5),
-                5);
+                               - math.log(1.0/(1+math.exp(1))) -
+                               math.log(0.5) - math.log(0.5),
+                               5)
 
     def test_mult_inputs(self):
         ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
@@ -52,7 +54,7 @@ class TestFeedForwardNet(unittest.TestCase):
         x1.set_value(1.1)
         x2 = tensor.Tensor((2, 2))
         x2.set_value(0.9)
-        out = ffn.forward(False, {'relu1':x1, 'relu2':x2})
+        out = ffn.forward(False, {'relu1': x1, 'relu2': x2})
         out = tensor.to_numpy(out)
         self.assertAlmostEqual(np.average(out), 2)
 
@@ -68,10 +70,24 @@ class TestFeedForwardNet(unittest.TestCase):
         x1.set_value(1.1)
         x2 = tensor.Tensor((2, 2))
         x2.set_value(0.9)
-        out = ffn.forward(False, {'relu1':x1, 'relu2':x2})
+        out = ffn.forward(False, {'relu1': x1, 'relu2': x2})
         out = tensor.to_numpy(out['split1'])
         self.assertAlmostEqual(np.average(out), 2)
 
+    def test_save(self):
+        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
+        ffn.add(layer.Conv2D('conv', 4, 3, input_sample_shape=(3, 12, 12)))
+        ffn.add(layer.Flatten('flat'))
+        # ffn.add(layer.BatchNorm('bn'))
+        ffn.add(layer.Dense('dense', num_output=4))
+        for pname, pval in zip(ffn.param_names(), ffn.param_values()):
+            pval.set_value(0.1)
+        ffn.save('test_snaphost')
+        ffn.save('test_pickle', use_pickle=True)
+
+        ffn.load('test_snaphost')
+        ffn.load('test_pickle', use_pickle=True)
+
 
 if __name__ == '__main__':
     unittest.main()