How to register a self-defined parameter in MXNet Gluon

class Net(HybridBlock):
    """Conv2D layer whose output is scaled by a single learnable scalar.

    The scalar is registered through ``self.params.get`` so that it shows
    up in ``collect_params()`` and is initialized by ``initialize()``.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.op = nn.Conv2D(10, kernel_size=5, strides=1, padding=1)
        # Creating mx.gluon.Parameter directly does NOT register it with the
        # block, so collect_params() misses it and initialize() never touches
        # it -> "Parameter 'ppp' has not been initialized".  Use
        # self.params.get() instead.  Note the shape must be a tuple: (1,),
        # not (1), which is just the int 1.
        self.p = self.params.get('ppp', shape=(1,))

    def hybrid_forward(self, F, x, p):
        # Parameters registered via self.params.get() are injected by Gluon
        # as keyword arguments named after the attribute ('p' here).
        return self.op(x) * p

if __name__ == '__main__':
    # Dummy NCHW batch: 2 images, 3 channels, 28x28.
    img = mx.nd.zeros((2, 3, 28, 28))
    model = Net()
    # All registered parameters (including the Conv2D weights) must be
    # initialized before the first forward pass.
    model.initialize()
    out = model(img)

I get the following error:

RuntimeError: Parameter 'ppp' has not been initialized. Note that you should initialize parameters and create Trainer with Block.collect_params() instead of Block.params because the later does not include Parameters of nested child Blocks

So how do I register the self-defined parameter self.p in MXNet, so that model.collect_params() will contain self.p?

Thank you.

Hi @Alpha,

This tutorial explains how to attach a parameter to a layer (without hybridization):

In short, for hybridization you can use:

import mxnet as mx
from mxnet import gluon, nd
from mxnet.gluon import nn

class MyDense(nn.HybridBlock):
    # units: the number of outputs in this layer; in_units: the number of
    # inputs in this layer
    def __init__(self, units, in_units, **kwargs):
        super(MyDense, self).__init__(**kwargs)
        self.weight = self.params.get('weight', shape=(in_units, units))
        self.bias = self.params.get('bias', shape=(units,))

    def hybrid_forward(self, F, x, weight, bias):
        linear =, weight) + bias
        return F.relu(linear)
net = MyDense(20, 20)
# The output pasted below implies the net was initialized and called on a
# random 20-element input; those two lines were missing from the snippet.
net.initialize()
print(net(nd.random.uniform(shape=(20,))))
[0.31464472 0.         0.         0.07762438 0.         0.29829207
 0.12118429 0.25685543 0.         0.18107665 0.         0.00356743
 0.         0.03836098 0.         0.         0.         0.24950406
 0.13381818 0.16004124]
<NDArray 20 @cpu(0)>

and printing net.collect_params() shows that both parameters are registered:

mydense2_ (
  Parameter mydense2_weight (shape=(20, 20), dtype=<class 'numpy.float32'>)
  Parameter mydense2_bias (shape=(20,), dtype=<class 'numpy.float32'>)
)
1 Like