You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2019/07/29 08:07:01 UTC

[GitHub] [incubator-mxnet] hdmjdp opened a new issue #15685: about hybrid_forward

hdmjdp opened a new issue #15685: about hybrid_forward
URL: https://github.com/apache/incubator-mxnet/issues/15685
 
 
   class MultiScaleConv1D(nn.HybridBlock):
       # NOTE(review): the class name and the __init__ signature were elided in
       # the original snippet ("class ...(nn.HybridBlock):" with the parameter
       # code pasted directly under it).  They are reconstructed here from the
       # attributes the body reads (self.num_input, self.num_hidden, self.dtype,
       # dropout_rate) -- confirm against the real source file.
       def __init__(self, num_input, num_hidden, dropout_rate, dtype='float32',
                    **kwargs):
           """Register depthwise (kernel 3/5/7) and pointwise 1-D conv
           parameters, per-channel gains, and a dropout layer.

           Args:
               num_input: number of input channels (split into 3 depthwise groups).
               num_hidden: number of output channels of the pointwise conv.
               dropout_rate: rate for the trailing nn.Dropout layer.
               dtype: parameter dtype.
           """
           super().__init__(**kwargs)
           self.num_input = num_input
           self.num_hidden = num_hidden
           self.dtype = dtype

           with self.name_scope():  # depthwise kernels: 3, 5, 7
               # BUG FIX: the original code had a stray trailing comma after the
               # assignments of weight_dw5, bias_dw3, bias_dw5, g_dw3 and g_dw5,
               # which made each of those attributes a 1-element *tuple* instead
               # of a gluon Parameter.  Block.__setattr__ registers only
               # Parameter-valued attributes, so those five were never collected
               # and never injected into hybrid_forward -- producing exactly the
               # reported "TypeError: hybrid_forward() missing 5 required
               # positional arguments: 'weight_dw5', 'bias_dw3', 'bias_dw5',
               # 'g_dw3', and 'g_dw5'".  All trailing commas are removed below.
               self.weight_dw3 = self.params.get('weight_dw3',
                                                 shape=(self.num_input // 3, 1, 3),
                                                 init=mx.init.Xavier(),
                                                 dtype=self.dtype,
                                                 allow_deferred_init=True)
               self.weight_dw5 = self.params.get('weight_dw5',
                                                 shape=(self.num_input // 3, 1, 5),
                                                 init=mx.init.Xavier(),
                                                 dtype=self.dtype,
                                                 allow_deferred_init=True)
               self.weight_dw7 = self.params.get('weight_dw7',
                                                 shape=(self.num_input // 3, 1, 7),
                                                 init=mx.init.Xavier(),
                                                 dtype=self.dtype,
                                                 allow_deferred_init=True)
               self.bias_dw3 = self.params.get('bias_dw3',
                                               shape=(self.num_input // 3,),
                                               init=mx.init.Zero(),
                                               dtype=self.dtype,
                                               allow_deferred_init=True)
               self.bias_dw5 = self.params.get('bias_dw5',
                                               shape=(self.num_input // 3,),
                                               init=mx.init.Zero(),
                                               dtype=self.dtype,
                                               allow_deferred_init=True)
               self.bias_dw7 = self.params.get('bias_dw7',
                                               shape=(self.num_input // 3,),
                                               init=mx.init.Zero(),
                                               dtype=self.dtype,
                                               allow_deferred_init=True)
               # NOTE(review): `mx.init.random.uniform` is unusual -- the
               # standard spelling is `mx.random.uniform` (or
               # `mx.nd.random.uniform`).  It is kept as written because the
               # reported failure happens later, at forward time, so this
               # apparently resolves in the author's environment; verify.
               self.g_dw3 = self.params.get('gain_dw3',
                                            shape=(self.num_input // 3, 1, 1),
                                            init=mx.init.Constant(
                                                mx.init.random.uniform(1, np.sqrt(5), shape=(self.num_input // 3, 1, 1))),
                                            dtype=self.dtype,
                                            allow_deferred_init=True)
               self.g_dw5 = self.params.get('gain_dw5',
                                            shape=(self.num_input // 3, 1, 1),
                                            init=mx.init.Constant(
                                                mx.init.random.uniform(1, np.sqrt(5), shape=(self.num_input // 3, 1, 1))),
                                            dtype=self.dtype,
                                            allow_deferred_init=True)
               self.g_dw7 = self.params.get('gain_dw7',
                                            shape=(self.num_input // 3, 1, 1),
                                            init=mx.init.Constant(
                                                mx.init.random.uniform(1, np.sqrt(5), shape=(self.num_input // 3, 1, 1))),
                                            dtype=self.dtype,
                                            allow_deferred_init=True)

               # Pointwise (1x1) projection to num_hidden channels.
               self.weight_pw = self.params.get('weight_pw',
                                                shape=(self.num_hidden, self.num_input, 1),
                                                init=mx.init.Xavier(),
                                                dtype=self.dtype,
                                                allow_deferred_init=True)
               self.bias_pw = self.params.get('bias_pw',
                                              shape=(self.num_hidden,),
                                              init=mx.init.Zero(),
                                              dtype=self.dtype,
                                              allow_deferred_init=True)
               self.g_pw = self.params.get('gain_pw',
                                           shape=(self.num_hidden, 1, 1),
                                           init=mx.init.Constant(
                                               mx.init.random.uniform(1, np.sqrt(5), shape=(self.num_hidden, 1, 1))),
                                           dtype=self.dtype,
                                           allow_deferred_init=True)

               self.dropout = nn.Dropout(rate=dropout_rate)
   
       # Gluon injects every Parameter registered on this block (each
       # ``self.params.get`` result assigned to an attribute in __init__) as an
       # argument here, matched by attribute name.  The reported TypeError
       # ("missing 5 required positional arguments") therefore means five of
       # those attributes were never registered as Parameters -- caused by the
       # stray trailing commas in the constructor, which wrap the Parameter in a
       # 1-element tuple that Block.__setattr__ does not register.
       def hybrid_forward(self, F, inputs, weight_dw3, weight_dw5, weight_dw7, bias_dw3, bias_dw5, bias_dw7, g_dw3, g_dw5, g_dw7, weight_pw, bias_pw, g_pw, *args, **kwargs):
           '''Apply the block to ``inputs``.

           Args:
               F: the ``mx.ndarray`` or ``mx.symbol`` module, supplied by Gluon
                   depending on whether the block is hybridized.
               inputs: A 3-D tensor with shape of [batch, depth, time].
               weight_dw3 ... g_pw: the Parameters registered in __init__,
                   injected by Gluon by attribute name.

           Returns:
               A tensor of the same shape and dtype as ``inputs``.
               (NOTE(review): the function body is truncated in this snippet;
               the return contract above is taken from the original docstring
               and should be confirmed against the full source.)
           '''
   
   
    When I call it as `self.conv_blocks_textenc[j](tensor)`, it gives this error:
       wld, vuv_logits = Vec2Cmp(vec)
     File "/home/hdm/.local/lib/python3.5/site-packages/mxnet/gluon/block.py", line 541, in __call__
       out = self.forward(*args)
     File "/home/hdm/.local/lib/python3.5/site-packages/mxnet/gluon/block.py", line 918, in forward
       return self.hybrid_forward(ndarray, x, *args, **params)
     File "/home/hdm/Documents/dctts/vec2cmp-mx-float/networks_3.py", line 94, in hybrid_forward
       tensor = self.conv_blocks_textenc[j](tensor)
     File "/home/hdm/.local/lib/python3.5/site-packages/mxnet/gluon/block.py", line 541, in __call__
       out = self.forward(*args)
     File "/home/hdm/.local/lib/python3.5/site-packages/mxnet/gluon/block.py", line 918, in forward
       return self.hybrid_forward(ndarray, x, *args, **params)
   TypeError: hybrid_forward() missing 5 required positional arguments: 'weight_dw5', 'bias_dw3', 'bias_dw5', 'g_dw3', and 'g_dw5'
   
   
   
   

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services