Is it possible to access intermediate layers inside a HybridBlock? For example, in the following code (source), is it possible to access the batchnorm layers? Indexing doesn't work; I get an error stating that LinearBottleneck does not support indexing (a minimal repro follows the class definition below).
from mxnet.gluon import nn  # RELU6 is defined alongside this class in the model zoo source

class LinearBottleneck(nn.HybridBlock):
    """LinearBottleneck used in MobileNetV2 model from the
    `"Inverted Residuals and Linear Bottlenecks:
    Mobile Networks for Classification, Detection and Segmentation"
    <https://arxiv.org/abs/1801.04381>`_ paper.

    Parameters
    ----------
    in_channels : int
        Number of input channels.
    channels : int
        Number of output channels.
    t : int
        Layer expansion ratio.
    stride : int
        stride
    """
    def __init__(self, in_channels, channels, t, stride, **kwargs):
        super(LinearBottleneck, self).__init__(**kwargs)
        self.use_shortcut = stride == 1 and in_channels == channels
        with self.name_scope():
            self.out = nn.HybridSequential()
            _add_conv(self.out, in_channels * t, relu6=True)
            _add_conv(self.out, in_channels * t, kernel=3, stride=stride,
                      pad=1, num_group=in_channels * t, relu6=True)
            _add_conv(self.out, channels, active=False, relu6=True)

    def hybrid_forward(self, F, x):
        out = self.out(x)
        if self.use_shortcut:
            out = F.elemwise_add(out, x)
        return out


# pylint: disable= too-many-arguments
def _add_conv(out, channels=1, kernel=1, stride=1, pad=0,
              num_group=1, active=True, relu6=False):
    out.add(nn.Conv2D(channels, kernel, stride, pad, groups=num_group, use_bias=False))
    out.add(nn.BatchNorm(scale=True))
    if active:
        out.add(RELU6() if relu6 else nn.Activation('relu'))
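For reference, this is roughly what I tried (the constructor arguments are just made-up values from my test script):

block = LinearBottleneck(in_channels=32, channels=16, t=1, stride=1)
bn = block[1]  # TypeError: 'LinearBottleneck' object does not support indexing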
The reason I want access is that I want to set use_global_stats to False on the batchnorm layers while training another task branch. Is that possible in Gluon?
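What I've tried so far: since self.out is a HybridSequential, indexing into that inner container (e.g. block.out[1] for the first BatchNorm) does seem to work, and Block.apply visits every child recursively. The helper below is just my own sketch; mutating the private _kwargs attribute is a guess at a workaround, not an API I found documented:

def _set_bn_global_stats(blk):
    # _kwargs is a private attribute of nn.BatchNorm holding the arguments
    # forwarded to the BatchNorm op (including use_global_stats); writing to
    # it directly is my own assumption, not a documented API.
    if isinstance(blk, nn.BatchNorm):
        blk._kwargs['use_global_stats'] = False

block.apply(_set_bn_global_stats)  # Block.apply recurses into all children
print(block.out[1]._kwargs['use_global_stats'])  # -> False

Is there a supported way to do this, or is reaching into _kwargs like this the expected approach?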