Pass image to get gradient

I am trying to get the gradient with respect to specific input images, which are loaded in adv_loader. I tried reusing the code that computes the gradient in the backprop step of training, but the YOLOv3 network object does not seem to have a grad attribute. Any ideas on how to get the gradient with respect to the input images?

import time

# Build a custom YOLOv3 detector (MobileNet-0.25 backbone) for our class list.
# pretrained_base=False because we re-initialize all weights below anyway.
net = gcv.model_zoo.yolo3_mobilenet0_25_custom(pretrained_base=False, classes=classes, ctx=ctx)
net.initialize(force_reinit=True)
net.collect_params().reset_ctx(ctx)

# Adam optimizer over all network parameters; kvstore='device' keeps
# gradient aggregation on the device(s) rather than on CPU.
trainer = mx.gluon.Trainer(net.collect_params(), 'adam',
                           {'learning_rate': 0.001},
                           kvstore='device')


# Hybridize once, before training: static alloc/shape lets MXNet cache the
# computation graph and its buffers across batches. (Re-calling this every
# epoch, as before, was redundant.)
net.hybridize(static_alloc=True, static_shape=True)
for i in range(nepochs):
    now = time.time()   # epoch start time
    mx.nd.waitall()     # drain pending async ops so timing/errors are attributed to this epoch
    for ixl, batch in enumerate(train_loader):
        # Split each batch element across the available device contexts.
        data = mx.gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_axis=0)
        # Training targets: objectness, center_targets, scale_targets, weights, class_targets
        fixed_targets = [mx.gluon.utils.split_and_load(batch[it], ctx_list=ctx, batch_axis=0)
                         for it in range(1, 6)]
        gt_boxes = mx.gluon.utils.split_and_load(batch[6], ctx_list=ctx, batch_axis=0)
        sum_losses = []
        with autograd.record():
            for ix, x in enumerate(data):
                # In training mode the YOLOv3 net returns the four loss components.
                obj_loss, center_loss, scale_loss, cls_loss = net(
                    x, gt_boxes[ix], *[ft[ix] for ft in fixed_targets])
                sum_losses.append(obj_loss + center_loss + scale_loss + cls_loss)
            autograd.backward(sum_losses)
        # NOTE(review): batch_size is assumed to be defined earlier in the file.
        trainer.step(batch_size)
    
# Compute the gradient of the detection loss with respect to the INPUT IMAGES.
#
# Fix for the original code: a Gluon Block (the network) has no `.grad`
# attribute — gradients live either on Parameters (param.grad()) or on
# NDArrays that have had attach_grad() called on them. To get d(loss)/d(image)
# we must attach a gradient buffer to each input array BEFORE recording,
# then read x.grad after backward().
for ixl, batch in enumerate(adv_loader):
    data = mx.gluon.utils.split_and_load(batch[0], ctx_list=ctx, batch_axis=0)
    # objectness, center_targets, scale_targets, weights, class_targets
    fixed_targets = [mx.gluon.utils.split_and_load(batch[it], ctx_list=ctx, batch_axis=0)
                     for it in range(1, 6)]
    gt_boxes = mx.gluon.utils.split_and_load(batch[6], ctx_list=ctx, batch_axis=0)

    # Mark each input image array as requiring a gradient.
    for x in data:
        x.attach_grad()

    sum_losses = []
    with autograd.record():
        for ix, x in enumerate(data):
            obj_loss, center_loss, scale_loss, cls_loss = net(
                x, gt_boxes[ix], *[ft[ix] for ft in fixed_targets])
            sum_losses.append(obj_loss + center_loss + scale_loss + cls_loss)
    # backward() outside the record scope; retain_graph is not needed since
    # we backprop through this graph only once.
    autograd.backward(sum_losses)

    # x.grad now holds d(loss)/d(input image) for each device slice.
    for x in data:
        print(x.grad.asnumpy())