You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@singa.apache.org by wa...@apache.org on 2016/01/05 19:10:34 UTC
[05/10] incubator-singa git commit: SINGA-120 - Implemented GRU and
BPTT: Updated dummy layer to fix bugs
SINGA-120 - Implemented GRU and BPTT: Updated dummy layer to fix bugs
Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/1f03f9d8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/1f03f9d8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/1f03f9d8
Branch: refs/heads/master
Commit: 1f03f9d8486cb85e55c585121b2a8838bae4ed44
Parents: b4b6a38
Author: Ju Fan <fa...@gmail.com>
Authored: Fri Jan 1 10:54:43 2016 +0800
Committer: Wei Wang <wa...@comp.nus.edu.sg>
Committed: Wed Jan 6 01:53:05 2016 +0800
----------------------------------------------------------------------
src/neuralnet/neuron_layer/dummy.cc | 17 ++++++++++-------
1 file changed, 10 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/1f03f9d8/src/neuralnet/neuron_layer/dummy.cc
----------------------------------------------------------------------
diff --git a/src/neuralnet/neuron_layer/dummy.cc b/src/neuralnet/neuron_layer/dummy.cc
index 11ce3ec..9ccb179 100644
--- a/src/neuralnet/neuron_layer/dummy.cc
+++ b/src/neuralnet/neuron_layer/dummy.cc
@@ -22,12 +22,14 @@
#include <glog/logging.h>
#include "singa/neuralnet/neuron_layer.h"
#include "singa/utils/math_blob.h"
+#include "singa/utils/context.h"
+#include "singa/utils/singleton.h"
namespace singa {
void DummyLayer::Setup(const LayerProto& proto,
const vector<Layer*>& srclayers) {
- Layer::Setup(proto, srclayers);
+ NeuronLayer::Setup(proto, srclayers);
if (proto.dummy_conf().input()) { // use as input layer
CHECK_EQ(srclayers.size(), 0);
input_ = true;
@@ -43,27 +45,28 @@ void DummyLayer::Setup(const LayerProto& proto,
if (proto.dummy_conf().output()) { // use as output layer
output_ = true;
}
-}
-std::random_device rd;
-std::mt19937 gen(rd());
-std::uniform_real_distribution<> dis(0, 1);
+}
void DummyLayer::ComputeFeature(int flag, const vector<Layer*>& srclayers) {
+ std::uniform_real_distribution<float> dis(0, 1);
+ auto gen = Singleton<Context>::Instance()->rand_generator();
if (input_) {
// randomly init data with [0,1] values
for (int i = 0; i < data_.count(); ++i)
- data_.mutable_cpu_data()[i] = dis(gen);
+ data_.mutable_cpu_data()[i] = dis(*gen);
}
if (srclayers.size() > 0)
Copy(srclayers[0]->data(this), &data_);
}
void DummyLayer::ComputeGradient(int flag, const vector<Layer*>& srclayers) {
+ std::uniform_real_distribution<float> dis(0, 1);
+ auto gen = Singleton<Context>::Instance()->rand_generator();
if (output_) {
// randomly init data with [0,1] values
for (int i = 0; i < data_.count(); ++i)
- grad_.mutable_cpu_data()[i] = dis(gen);
+ grad_.mutable_cpu_data()[i] = dis(*gen);
}
if (srclayers.size() > 0)
Copy(grad_, srclayers[0]->mutable_grad(this));