HybridBlock wrapper of constant tensor

Dear all,

I am trying to create a HybridBlock custom Convolutional layer. This needs a constant nd.array. Based on discussions here and here I am creating a custom initializer and then I create inside HybridBlock a custom parameter self.bijkl that is initialized with a custom initializer CustomInit. The full example is here:


import mxnet as mx
from mxnet import nd, gluon
from mxnet.gluon import HybridBlock

class CustomInit(mx.initializer.Initializer):
    """Initializer that fills a parameter with a fixed, pre-generated tensor.

    This emulates a constant tensor inside a HybridBlock: the parameter is
    registered normally, but `_init_weight` always copies in `self.Bijkl`.

    Parameters
    ----------
    kernel_size : int
        First two dimensions of the constant tensor.
    kernel_effective_size : int
        Last two dimensions of the constant tensor.
    """

    def __init__(self, kernel_size=3, kernel_effective_size=5, **kwargs):
        # NOTE(review): the original signature was truncated in the paste;
        # reconstructed here with **kwargs — confirm against the real code.
        # Initializer subclasses must call super().__init__ so the base class
        # records the init kwargs (needed for printing/serialization).
        super(CustomInit, self).__init__(**kwargs)
        # A trivial "constant" tensor: random at construction time, but fixed
        # thereafter — every parameter initialized with this object gets it.
        self.Bijkl = nd.random_uniform(
            shape=[kernel_size, kernel_size,
                   kernel_effective_size, kernel_effective_size])

    def _init_weight(self, name, arr):
        # Copy the stored constant into the parameter's array in place.
        arr[:] = self.Bijkl

class Conv2DS(HybridBlock):
    """Custom convolution whose effective kernel is built from a learnable
    weight contracted with a constant tensor (Bijkl).

    NOTE(review): several argument lists below were truncated when this code
    was pasted (`params.get(...)` and `F.Convolution(...)` are missing
    arguments) — the lines are kept as posted, with TODOs where content is
    missing.
    """
    def __init__(self,  nfilters, nchannels=0, kernel_size = 3, kernel_effective_size = 5, use_bias = True, padding = (0,0), **kwards):
        # NOTE(review): no super(Conv2DS, self).__init__(**kwards) call is
        # visible here — HybridBlock subclasses require it before using
        # self.name_scope()/self.params; presumably lost in the paste. Confirm.
        self.nchannels = nchannels
        self.nfilters = nfilters
        self.kernel_size = kernel_size
        self.kernel_eff = kernel_effective_size
        self.use_bias = use_bias
        self.pad = padding
        with self.name_scope():

            # Learnable convolution weight.
            # TODO(review): argument list truncated — name/shape/init missing.
            self.weight = self.params.get(
             # This is the custom tensor I need to create, a constant. 
            # TODO(review): argument list truncated — name/shape missing;
            # initialized via CustomInit so it always holds the constant Bijkl.
            self.Bijkl = self.params.get(
                init = CustomInit(self.kernel_size, self.kernel_eff),

            if self.use_bias:
                # Optional bias, zero-initialized.
                # TODO(review): argument list truncated — name/shape missing.
                self.bias = self.params.get(
                    init = mx.init.Zero(),

    def hybrid_forward(self,F,_x):
        # NOTE(review): registered Parameters are injected into hybrid_forward
        # as keyword arguments (see the TypeError in the post); this signature
        # does not accept them, which is what triggers the failure below.
        # These finalize deferring the input shape 
        # --------------------------------------------------------------------------------
        #self.weight.shape = (self.nfilters,_x.shape[1],self.kernel_size,self.kernel_size)
        # -------------------------------------------------------------------------------- 
        # Contract the learnable weight with the constant tensor over its last
        # two axes to form the effective convolution kernel.
        weight = F.sum(F.dot(self.weight.data() , self.Bijkl.data()),axis=[2,3])
        if self.use_bias:
            # TODO(review): truncated — weight/bias/kernel/num_filter args missing.
            conv = F.Convolution(data=_x,
                                  pad = self.pad)
        else : 
            # TODO(review): truncated — weight/no_bias/kernel/num_filter args missing.
            conv = F.Convolution(data=_x,
                                  pad = self.pad)

        return conv    

I can initialize my layer:

nchannels = 8
nfilters = 16
dim = 128

mynet = Conv2DS(nfilters, kernel_size=5, kernel_effective_size= 15, use_bias=False)


but when I run a single pass I get the following error:

xx = nd.random_uniform(shape = [4,nchannels,dim,dim],ctx=mx.gpu())
temp = mynet (xx)

Error code:

TypeError                                 Traceback (most recent call last)
<ipython-input-5-27f92d21b3eb> in <module>()
----> 1 temp = mynet (xx)

/home/dia021/anaconda2/lib/python2.7/site-packages/mxnet/gluon/block.pyc in __call__(self, *args)
    358     def __call__(self, *args):
    359         """Calls forward. Only accepts positional arguments."""
--> 360         return self.forward(*args)
    362     def forward(self, *args):

/home/dia021/anaconda2/lib/python2.7/site-packages/mxnet/gluon/block.pyc in forward(self, x, *args)
    568                     params = {i: j.data(ctx) for i, j in self._reg_params.items()}
    569                 except DeferredInitializationError:
--> 570                     self._finish_deferred_init(self._active, x, *args)
    572                 if self._active:

/home/dia021/anaconda2/lib/python2.7/site-packages/mxnet/gluon/block.pyc in _finish_deferred_init(self, hybrid, *args)
    459     def _finish_deferred_init(self, hybrid, *args):
--> 460         self.infer_shape(*args)
    461         if hybrid:
    462             for is_arg, i in self._cached_op_args:

/home/dia021/anaconda2/lib/python2.7/site-packages/mxnet/gluon/block.pyc in infer_shape(self, *args)
    519     def infer_shape(self, *args):
    520         """Infers shape of Parameters from inputs."""
--> 521         self._infer_attrs('infer_shape', 'shape', *args)
    523     def infer_type(self, *args):

/home/dia021/anaconda2/lib/python2.7/site-packages/mxnet/gluon/block.pyc in _infer_attrs(self, infer_fn, attr, *args)
    507     def _infer_attrs(self, infer_fn, attr, *args):
    508         """Generic infer attributes."""
--> 509         inputs, out = self._get_graph(*args)
    510         args, _ = _flatten(args)
    511         arg_attrs, _, aux_attrs = getattr(out, infer_fn)(

/home/dia021/anaconda2/lib/python2.7/site-packages/mxnet/gluon/block.pyc in _get_graph(self, *args)
    426             params = {i: j.var() for i, j in self._reg_params.items()}
    427             with self.name_scope():
--> 428                 out = self.hybrid_forward(symbol, *grouped_inputs, **params)  # pylint: disable=no-value-for-parameter
    429             out, self._out_format = _flatten(out)

TypeError: hybrid_forward() got an unexpected keyword argument 'Bijkl'

I think the problem is with the variable name, self.Bijkl? So I guess my question is: is there currently a simple way to wrap a constant variable (a tensor of shape [n1,n2,n3,n4]) inside a HybridBlock? I have a solution for Block, but I want to take advantage of HybridBlock performance.

Thank you for your time and the great work you put into mxnet!

Reading the code (https://github.com/apache/incubator-mxnet/blob/master/python/mxnet/gluon/block.py#L426), it looks like all the params created in a subclass of HybridBlock are registered in a dict and passed to hybrid_forward as kwargs. So you should be able to do something like this:

def hybrid_forward(self, F, _x, *args, **kwargs):
     # Registered Parameters are passed to hybrid_forward as keyword
     # arguments, so look them up in **kwargs instead of calling .data()
     # on the attributes directly.
     # NOTE(review): the traceback above names the kwarg 'Bijkl' (capital B);
     # the kwargs key must match the registered parameter name exactly, so
     # "bijkl" here may need to be "Bijkl" — confirm against the params.get
     # name used in __init__.
     weight = F.sum(F.dot(kwargs["weight"], kwargs["bijkl"]),axis=[2,3])
1 Like