Posted to commits@singa.apache.org by wa...@apache.org on 2019/12/21 13:03:03 UTC
[singa] branch master updated: fix
This is an automated email from the ASF dual-hosted git repository.
wangwei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/singa.git
The following commit(s) were added to refs/heads/master by this push:
new 645af8e fix
new 9c100ac Merge pull request #569 from joddiy/fix-batchnom-scale-and-bias-as-parameter
645af8e is described below
commit 645af8e9bc9986f228851d1ca6b6d5ca7dc517a1
Author: joddiy <jo...@qq.com>
AuthorDate: Thu Dec 19 20:10:13 2019 +0800
fix
---
python/singa/autograd.py | 9 +++------
python/singa/sonnx.py | 26 +-------------------------
2 files changed, 4 insertions(+), 31 deletions(-)
diff --git a/python/singa/autograd.py b/python/singa/autograd.py
index cc5bb6f..37cb431 100644
--- a/python/singa/autograd.py
+++ b/python/singa/autograd.py
@@ -1498,16 +1498,13 @@ class BatchNorm2d(Layer):
 class _BatchNorm2d(Operation):
-    def __init__(self, handle, running_mean, running_var, scale, bias, name=None):
+    def __init__(self, handle, running_mean, running_var, name=None):
         super(_BatchNorm2d, self).__init__(name)
         self.handle = handle
         self.running_mean = running_mean.data
         self.running_var = running_var.data
-        self.scale = scale.data
-        self.bias = bias.data

-    def forward(self, x):
-        scale, bias = self.scale, self.bias
+    def forward(self, x, scale, bias):
         if training:
             if (type(self.handle) == singa.BatchNormHandle):
                 y, mean, var = singa.CpuBatchNormForwardTraining(
@@ -1566,7 +1563,7 @@ class _BatchNorm2d(Operation):
 def batchnorm_2d(handle, x, scale, bias, running_mean, running_var):
-    return _BatchNorm2d(handle, running_mean, running_var, scale, bias)(x)[0]
+    return _BatchNorm2d(handle, running_mean, running_var)(x, scale, bias)[0]

 class _Pooling2d(Operation):
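The hunks above are the heart of the fix: scale and bias move from constructor-captured state into forward() arguments, so the autograd engine records them as graph inputs and can compute gradients for them. A minimal sketch (illustrative Python, not SINGA code; all names here are hypothetical) of why captured state breaks training:

    class Operation:
        def __call__(self, *inputs):
            # Every tensor passed here becomes a graph edge that
            # backward() can later propagate gradients along.
            self.src_inputs = inputs
            return self.forward(*inputs)

    class CapturedScale(Operation):
        def __init__(self, scale):
            # Hidden state: no graph edge is recorded, so this
            # scale never receives a gradient during backward().
            self.scale = scale

        def forward(self, x):
            return x * self.scale

    class ScaleAsInput(Operation):
        def forward(self, x, scale):
            # scale arrived through __call__, so it is a tracked
            # input and d(loss)/d(scale) can be computed.
            return x * scale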
diff --git a/python/singa/sonnx.py b/python/singa/sonnx.py
index e124135..5b175ba 100755
--- a/python/singa/sonnx.py
+++ b/python/singa/sonnx.py
@@ -485,29 +485,12 @@ class SingaFrontend(object):
         # then we add nodes for scale, bias, mean, var
         nodes = []
         running_values = {
-            "scale": op.scale,
-            "bias": op.bias,
             "mean": op.running_mean,
             "var": op.running_var
         }
         for tmp_name, running_value in running_values.items():
             node_name = op.name+":"+tmp_name
             bn_node.input.append(node_name)
-            # running_value.ToHost()
-            # running_value = running_value.GetFloatValue(int(running_value.Size()))
-            # node = NodeProto()
-            # node.name = node_name
-            # node.op_type = cls._rename_operators.get("Dummy", "Dummy")
-            # node.output.extend([node_name])
-            # node.attribute.extend([helper.make_attribute(
-            #     'value', helper.make_tensor(
-            #         name=node_name,
-            #         data_type=TensorProto.FLOAT,
-            #         dims=[len(running_value)],
-            #         vals=running_value,
-            #     )
-            # )])
-            # nodes.append(node)
         nodes.append(bn_node)
         return nodes
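For reference, the reason only mean and var still need synthetic input names: ONNX's BatchNormalization operator takes five inputs in a fixed order (X, scale, B, mean, var), and after this commit scale and bias arrive as real graph inputs from the autograd side. A hedged sketch of building such a node with the onnx helper (the tensor names "X", "Y", and "bn0" are illustrative):

    from onnx import helper

    # Five inputs in the order the BatchNormalization spec requires;
    # only the mean/var slots still point at frontend-generated names.
    bn_node = helper.make_node(
        "BatchNormalization",
        inputs=["X", "scale", "bias", "bn0:mean", "bn0:var"],
        outputs=["Y"],
        name="bn0",
    )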
@@ -664,8 +647,6 @@ class SingaFrontend(object):
             elif yid in input_tensors and optype == '_BatchNorm2d':
                 # batchnorm add scale, bias, mean, var as inputs
                 running_values = {
-                    "scale": op.scale,
-                    "bias": op.bias,
                     "mean": op.running_mean,
                     "var": op.running_var
                 }
@@ -1396,11 +1377,6 @@ class SingaBackend(Backend):
             inputs = [tensor_map[x].clone() for x in node.inputs]
             handle, forward = cls._onnx_node_to_singa_op(node, inputs, opset_version)
             singa_ops.extend([singa_op(node.name, node, handle, forward)])
-            # we must know the shape of ouput
-            # becasue it will become the input of next layer
-            # so we need to init a new tensor with the same shape with the output
-            # outputs = cls._run_node(node, inputs, handle, forward, opset_version)
-            # tensor_map.update(outputs)
         return weights, singa_ops
@classmethod
@@ -1482,7 +1458,7 @@ class SingaRep(BackendRep):
         ret_outputs = collections.OrderedDict()
         # run the handles in list order (the list is topologically sorted)
         for x, val in zip(self.model.graph.input, inputs):
-            self.tensor_map[x.name] = val
+            self.tensor_map[x.name] = val
         for _, op, handle, forward in self.singa_ops[:last_layers]:
             inputs = [self.tensor_map[x] for x in op.inputs]
             outputs = _run_node(op, inputs, handle, forward)
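After this commit, prepare() only builds the handle/forward pairs, and output shapes are resolved when run() walks the sorted op list with real tensors. A hedged usage sketch, assuming sonnx.prepare returns a SingaRep as in this file (the model path and input shape are illustrative):

    import onnx
    from singa import device, sonnx, tensor

    model = onnx.load("model.onnx")
    dev = device.get_default_device()
    rep = sonnx.prepare(model, device=dev)    # builds the singa_ops list
    x = tensor.Tensor(shape=(1, 3, 224, 224), device=dev)
    x.gaussian(0.0, 1.0)                      # random test input
    outputs = rep.run([x])                    # executes ops in topological order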