I find that the output of the first run is different from the output of the second run.
Why does this happen?
Is it a bug?
Any help would be appreciated!
Thank you!
code:
from mxnet import gluon, image, init, nd, autograd, gpu, cpu
from mxnet.gluon import data as gdata, loss as gloss, model_zoo, nn
import numpy as np
import mxnet as mx
# Reproduction script: build a Conv -> BatchNorm -> LeakyReLU -> ConvTranspose
# net with constant-initialized parameters, run it once under autograd.record(),
# and compare the output against a .npy saved from a previous run.
ctx = gpu(0)

conv = nn.Conv2D(in_channels=3, channels=32, kernel_size=3,
                 strides=1, padding=1, use_bias=False)
bn = nn.BatchNorm(momentum=0.1, in_channels=32)
ru = nn.LeakyReLU(alpha=0.2)
convt = nn.Conv2DTranspose(in_channels=32, channels=64, kernel_size=3,
                           strides=2, padding=1, output_padding=1,
                           use_bias=False)
model = [conv, bn, ru, convt]

net = nn.HybridSequential()
with net.name_scope():
    for layer in model:
        net.add(layer)

# Constant initialization so each fresh run starts from identical parameters.
net.collect_params('.*gamma|.*running_var').initialize(mx.init.Constant(1), ctx=ctx)
net.collect_params('.*beta|.*bias|.*running_mean').initialize(mx.init.Constant(0), ctx=ctx)
net.collect_params('.*weight').initialize(init=mx.init.Constant(1), ctx=ctx)

x = nd.ones((1, 3, 352, 640))

# NOTE(review): inside autograd.record() BatchNorm runs in *training* mode —
# it normalizes with the current batch statistics and updates
# running_mean/running_var as a side effect. Outside record() it would use
# the running statistics instead, so a "recorded" forward pass and a plain
# forward pass produce different outputs by design. Additionally, GPU
# convolution kernels (cuDNN algorithm selection) are not guaranteed to be
# bitwise deterministic between runs — small float differences are expected,
# so compare with a tolerance rather than exact equality.
with autograd.record():
    y = net(x.as_in_context(ctx))

# First run: uncomment to save the reference output.
# np.save('dd1', y.asnumpy())
s_pred = np.load('dd1.npy')
t_pred = y.asnumpy()
sub = s_pred - t_pred

# BUG in the original check: sum() lets positive and negative elementwise
# errors cancel, so it can print ~0 even when outputs differ substantially.
# Report the max absolute difference as the meaningful metric.
print(sub.sum())
print('max abs diff:', np.abs(sub).max())