Hi, I want to implement Grad-CAM on my MXNet Module model. I am currently using MXNet 1.2.0.
def get_symbol(self):
    """Build the Grad-CAM loss symbol.

    Multiplies the softmax class probabilities by a one-hot mask of the
    ground-truth label and sums over the class axis, so the MakeLoss
    output is the predicted probability of the labelled class.
    """
    # ...... (fc1 and the rest of the network are built above)
    probs = mx.sym.SoftmaxActivation(data=fc1, mode="instance", name='softmax')
    # NOTE(review): depth is NUM_CLASSES - 1 — confirm this off-by-one
    # (background class excluded?) is intentional.
    label_mask = mx.sym.one_hot(indices=gt_image_label,
                                depth=cfg.dataset.NUM_CLASSES - 1,
                                name='one_hot_label')
    masked_probs = probs * label_mask
    class_prob = mx.sym.sum(data=masked_probs, axis=1)
    return mx.sym.MakeLoss(name='grad_cam_loss', data=class_prob, grad_scale=1.0)
# Build a Module around the Grad-CAM symbol and run one forward/backward
# pass so gradient arrays are populated.
mod = mx.mod.Module(symbol=sym,
context=context,
# NOTE(review): data/label names come from train_iter, but the module is
# bound below with test_iter's shapes — confirm the two iterators agree.
data_names=[k[0] for k in train_iter.provide_data_single],
label_names=[k[0] for k in train_iter.provide_label_single],
fixed_param_names=fixed_param_names)
arg_params, aux_params = load_param(config.network.pretrained)
# for_training=True is required so backward() computes gradients at all;
# inputs_need_grad=True additionally makes gradients flow back to the
# data inputs (not just the parameters).
mod.bind(data_shapes=test_iter.provide_data, label_shapes=test_iter.provide_label, for_training=True,
force_rebind=False, inputs_need_grad=True)
mod.init_params(arg_params=arg_params, aux_params=aux_params)
mod.init_optimizer(kvstore='device', optimizer='sgd', optimizer_params=optimizer_params)
databatch = test_iter.next()
mod.forward_backward(databatch)
# _exec_group is a private Module attribute; per the question below, its
# grad_arrays only hold gradients of the bound arguments (arg_params and,
# with inputs_need_grad, the inputs) — not gradients of intermediate
# activations such as conv5.
e = mod._exec_group
# e.grad_arrays?
I want to get the gradients of activations; however, I cannot find them. `mod._exec_group` only contains the gradients of the arg_params. How can I get the gradient of an activation (e.g. the gradient of conv5)?