Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2020/03/11 10:43:41 UTC

[GitHub] [incubator-mxnet] aGiant commented on issue #17814: mxnet.gluon.data.vision.transforms.Normalize(mean=0.0, std=1.0) tuple issue within hybird_forward()

aGiant commented on issue #17814: mxnet.gluon.data.vision.transforms.Normalize(mean=0.0, std=1.0) tuple issue within hybird_forward()
URL: https://github.com/apache/incubator-mxnet/issues/17814#issuecomment-597561951
 
 
   I also tried to reproduce the demo code from this link: https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/custom_layer_beginners.html, but got an error.
   ```
   # Do some initial imports used throughout this tutorial
   from __future__ import print_function
   import mxnet as mx
   from mxnet import nd, gluon, autograd
   from mxnet.gluon.nn import Dense
   mx.random.seed(1)    
   class NormalizationHybridLayer(gluon.HybridBlock):
       def __init__(self, hidden_units, scales):
           super(NormalizationHybridLayer, self).__init__()
   
           with self.name_scope():
               self.weights = self.params.get('weights',
                                              shape=(hidden_units, 0),
                                              allow_deferred_init=True)
   
               self.scales = self.params.get('scales',
                                         shape=scales.shape,
                                         init=mx.init.Constant(scales.asnumpy()),
                                         differentiable=False)
   
       def hybrid_forward(self, F, x, weights, scales):
           normalized_data = F.broadcast_div(F.broadcast_sub(x, F.min(x)), (F.broadcast_sub(F.max(x), F.min(x))))
           weighted_data = F.FullyConnected(normalized_data, weights, num_hidden=self.weights.shape[0], no_bias=True)
           scaled_data = F.broadcast_mul(scales, weighted_data)
           return scaled_data
   
   def print_params(title, net):
       """
       Helper function to print out the state of parameters of NormalizationHybridLayer
       """
       print(title)
       hybridlayer_params = {k: v for k, v in net.collect_params().items() if 'normalizationhybridlayer' in k }
   
       for key, value in hybridlayer_params.items():
           print('{} = {}\n'.format(key, value.data()))
   
   net = gluon.nn.HybridSequential()                             # Define a Neural Network as a sequence of hybrid blocks
   with net.name_scope():                                        # Used to disambiguate saving and loading net parameters
       net.add(Dense(5))                                         # Add Dense layer with 5 neurons
       net.add(NormalizationHybridLayer(hidden_units=5,
                                        scales = nd.array([2]))) # Add our custom layer
       net.add(Dense(1))                                         # Add Dense layer with 1 neuron
   
   
   net.initialize(mx.init.Xavier(magnitude=2.24))                # Initialize parameters of all layers
   net.hybridize()                                               # Create, optimize and cache computational graph
   
   inputs = nd.random_uniform(low=-10, high=10, shape=(5, 2))     # Create 5 random examples with 2 features each in range [-10, 10]
   label = nd.random_uniform(low=-1, high=1, shape=(5, 1))
   
   mse_loss = gluon.loss.L2Loss()                                # Mean squared error between output and label
   trainer = gluon.Trainer(net.collect_params(),                 # Init trainer with Stochastic Gradient Descent (sgd) optimization method and parameters for it
                           'sgd',
                           {'learning_rate': 0.1, 'momentum': 0.9 })
   
   with autograd.record():                                       # Autograd records computations done on NDArrays inside "with" block
       output = net(inputs)                                       # Run forward propagation
   
       print_params("=========== Parameters after forward pass ===========\n", net)
       loss = mse_loss(output, label)                            # Calculate MSE
   
   loss.backward()                                               # Backward computes gradients and stores them in the .grad field of each NDArray
   trainer.step(inputs.shape[0])                                  # Trainer updates the parameters of every block with the optimization method (sgd in this example), using the .grad fields
                                                                 # We provide the batch size, which is used as a divisor in the cost function formula
   print_params("=========== Parameters after backward pass ===========\n", net)
   ```
   
   Error raised:
   ```
   TypeError                          Traceback (most recent call last)
   <ipython-input-45-289ed4b30586> in <module>
        29 
        30 with autograd.record():                                       # Autograd records computations done on NDArrays inside "with" block
   ---> 31     output = net(inputs)                                       # Run forward propogation
        32 
        33     print_params("=========== Parameters after forward pass ===========\n", net)
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
       546             hook(self, args)
       547 
   --> 548         out = self.forward(*args)
       549 
       550         for hook in self._forward_hooks.values():
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
       913             with x.context as ctx:
       914                 if self._active:
   --> 915                     return self._call_cached_op(x, *args)
       916 
       917                 try:
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in _call_cached_op(self, *args)
       803     def _call_cached_op(self, *args):
       804         if self._cached_op is None:
   --> 805             self._build_cache(*args)
       806 
       807         args, fmt = _flatten(args, "input")
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in _build_cache(self, *args)
       755 
       756     def _build_cache(self, *args):
   --> 757         data, out = self._get_graph(*args)
       758         data_names = {data.name : i for i, data in enumerate(data)}
       759         params = self.collect_params()
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in _get_graph(self, *args)
       747             params = {i: j.var() for i, j in self._reg_params.items()}
       748             with self.name_scope():
   --> 749                 out = self.hybrid_forward(symbol, *grouped_inputs, **params)  # pylint: disable=no-value-for-parameter
       750             out, self._out_format = _flatten(out, "output")
       751 
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/nn/basic_layers.py in hybrid_forward(self, F, x)
       115     def hybrid_forward(self, F, x):
       116         for block in self._children.values():
   --> 117             x = block(x)
       118         return x
       119 
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
       546             hook(self, args)
       547 
   --> 548         out = self.forward(*args)
       549 
       550         for hook in self._forward_hooks.values():
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
       928             "HybridBlock requires the first argument to forward be either " \
       929             "Symbol or NDArray, but got %s"%type(x)
   --> 930         params = {i: j.var() for i, j in self._reg_params.items()}
       931         with self.name_scope():
       932             return self.hybrid_forward(symbol, x, *args, **params)
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in <dictcomp>(.0)
       928             "HybridBlock requires the first argument to forward be either " \
       929             "Symbol or NDArray, but got %s"%type(x)
   --> 930         params = {i: j.var() for i, j in self._reg_params.items()}
       931         with self.name_scope():
       932             return self.hybrid_forward(symbol, x, *args, **params)
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/parameter.py in var(self)
       602             self._var = symbol.var(self.name, shape=self.shape, dtype=self.dtype,
       603                                    lr_mult=self.lr_mult, wd_mult=self.wd_mult,
   --> 604                                    init=self.init, stype=self._stype)
       605         return self._var
       606 
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/symbol/symbol.py in var(name, attr, shape, lr_mult, wd_mult, dtype, init, stype, **kwargs)
      2649     if init is not None:
      2650         if not isinstance(init, string_types):
   -> 2651             init = init.dumps()
      2652         attr['__init__'] = init
      2653     if stype is not None:
   
   ~/anaconda3/lib/python3.7/site-packages/mxnet/initializer.py in dumps(self)
       114         '["xavier", {"rnd_type": "uniform", "magnitude": 2.34, "factor_type": "in"}]'
       115         """
   --> 116         return json.dumps([self.__class__.__name__.lower(), self._kwargs])
       117 
       118     def __call__(self, desc, arr):
   
   ~/anaconda3/lib/python3.7/json/__init__.py in dumps(obj, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
       229         cls is None and indent is None and separators is None and
       230         default is None and not sort_keys and not kw):
   --> 231         return _default_encoder.encode(obj)
       232     if cls is None:
       233         cls = JSONEncoder
   
   ~/anaconda3/lib/python3.7/json/encoder.py in encode(self, o)
       197         # exceptions aren't as detailed.  The list call should be roughly
       198         # equivalent to the PySequence_Fast that ''.join() would do.
   --> 199         chunks = self.iterencode(o, _one_shot=True)
       200         if not isinstance(chunks, (list, tuple)):
       201             chunks = list(chunks)
   
   ~/anaconda3/lib/python3.7/json/encoder.py in iterencode(self, o, _one_shot)
       255                 self.key_separator, self.item_separator, self.sort_keys,
       256                 self.skipkeys, _one_shot)
   --> 257         return _iterencode(o, 0)
       258 
       259 def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
   
   ~/anaconda3/lib/python3.7/json/encoder.py in default(self, o)
       177 
       178         """
   --> 179         raise TypeError(f'Object of type {o.__class__.__name__} '
       180                         f'is not JSON serializable')
       181 
   
   TypeError: Object of type ndarray is not JSON serializable

   ```
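
   From the traceback, the `TypeError` seems to come from `mx.init.Constant(scales.asnumpy())`: the initializer keeps the NumPy array in its `_kwargs`, and once the net is hybridized, `Parameter.var()` calls `Initializer.dumps()`, which runs `json.dumps` on that array. A minimal sketch that isolates just this step, assuming the same MXNet build as in the traceback above:
   ```
   import mxnet as mx
   from mxnet import nd

   scales = nd.array([2])

   # Constant keeps the value it is given inside the initializer's _kwargs.
   init = mx.init.Constant(scales.asnumpy())

   # dumps() runs json.dumps on _kwargs; with a numpy ndarray inside it should
   # fail with the same "Object of type ndarray is not JSON serializable" error.
   init.dumps()
   ```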

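   If it really is only that JSON dump that breaks building the cached graph, handing `mx.init.Constant` a plain Python number instead of the NumPy array might work around it here, since `scales` only holds a single value. Just a sketch, not verified across MXNet versions:
   ```
   import mxnet as mx
   from mxnet import nd

   scales = nd.array([2])

   # Hypothetical workaround: pass a plain float so Initializer.dumps() has
   # nothing json.dumps cannot handle.
   init = mx.init.Constant(float(scales.asnumpy()[0]))
   print(init.dumps())   # e.g. '["constant", {"value": 2.0}]'

   # In the custom layer above this would mean
   #     init=mx.init.Constant(float(scales.asnumpy()[0]))
   # instead of
   #     init=mx.init.Constant(scales.asnumpy())
   ```
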
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services