Shape inconsistency (using gluon pretrained model)

code

import mxnet as mx
from mxnet.gluon.model_zoo import vision
import cv2
import numpy as np

ctx = mx.gpu(6) # successful
net = vision.alexnet(pretrained=True, ctx=ctx)

# Preparing the input image. You may skip over this part; it just
# preprocesses the image for the net and loads it with shape
# (batch=1, channel=3, width, height).
im = cv2.imread('img.jpg') # w,h = 4032,3024. RGB color image
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB).astype(float)/255
im = mx.image.color_normalize(im, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) 
im = np.transpose(im, (2,0,1)) # (4032,3024,3) -> (3,4032,3024)
im = im[None,:] # (3,4032,3024) -> (1,3,4032,3024). this means batchsize=1
im = mx.nd.array(im, ctx=ctx)

# run 
r = net(im)

error

MXNetError: Shape inconsistent, Provided = [4096,9216], inferred shape=(4096,2976000).

But when I use resnet18_v2 instead of alexnet, this code runs well.
Any suggestions?

Thanks in advance.

Hi @Pilhoon_Jang,

I don’t have access to the specific image, so I am only assuming the dimensionality of the input you give, i.e. im.shape == (1,3,4032,3024). In my experiments (mxnet version 1.3.0) both networks complete a forward call, whether hybridized or not:

import mxnet as mx 
from mxnet.gluon.model_zoo import vision
from mxnet import nd, gluon

net1 = vision.alexnet(pretrained=False)
net2 = vision.resnet18_v2(pretrained=False)

net1.initialize()
net2.initialize()

# Random input with the same shape as the image you are feeding into the network
im = nd.random.uniform(shape=[1,3,4032,3024])

r1 = net1(im) # runs OK
r2 = net2(im) # runs OK

# Some outputs
In [13]: r1.shape
Out[13]: (1, 1000)

In [14]: r2.shape
Out[14]: (1, 1000)

In [15]: im.shape
Out[15]: (1, 3, 4032, 3024)

In [16]: mx.__version__
Out[16]: '1.3.0'
# Passes with hybridized as well
In [17]: net1.hybridize()

In [18]: net2.hybridize()

In [19]: r1 = net1(im)

In [20]: r2 = net2(im)

So maybe check that the input image has the correct dimensions, and that you have the same mxnet version? Hope this helps.
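For what it’s worth, a two-line sanity check along those lines, to run right before net(im) (the expected values in the comments are what the code in the first post should produce):

import mxnet as mx  # already imported in the snippet above

print(mx.__version__)        # 1.3.0 in this thread
print(im.shape, im.context)  # expect (1, 3, 4032, 3024) and gpu(6)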

@feevos I’m very thankful for your kind answer. I’m using 1.3.0 (mx.__version__) and I tried the same thing as you, but my machine produces the same error. I cannot figure out what this error means.

full code

import mxnet as mx
from mxnet.gluon.model_zoo import vision
ctx = mx.gpu(6)
net = vision.alexnet(pretrained=True, ctx=ctx)
im = mx.nd.random.uniform(shape=[1,3,4032,3024], ctx=ctx)
net(im)

error

---------------------------------------------------------------------------
MXNetError                                Traceback (most recent call last)
<ipython-input-...> in <module>
      5 net = vision.alexnet(pretrained=True, ctx=ctx)
      6 im = mx.nd.random.uniform(shape=[1,3,4032,3024], ctx=ctx)
----> 7 net(im)

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
    539             hook(self, args)
    540 
--> 541         out = self.forward(*args)
    542 
    543         for hook in self._forward_hooks.values():

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
    916                     params = {i: j.data(ctx) for i, j in self._reg_params.items()}
    917 
--> 918                 return self.hybrid_forward(ndarray, x, *args, **params)
    919 
    920         assert isinstance(x, Symbol), \

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/model_zoo/vision/alexnet.py in hybrid_forward(self, F, x)
     64 
     65     def hybrid_forward(self, F, x):
---> 66         x = self.features(x)
     67         x = self.output(x)
     68         return x

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
    539             hook(self, args)
    540 
--> 541         out = self.forward(*args)
    542 
    543         for hook in self._forward_hooks.values():

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
    916                     params = {i: j.data(ctx) for i, j in self._reg_params.items()}
    917 
--> 918                 return self.hybrid_forward(ndarray, x, *args, **params)
    919 
    920         assert isinstance(x, Symbol), \

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/nn/basic_layers.py in hybrid_forward(self, F, x)
    115     def hybrid_forward(self, F, x):
    116         for block in self._children.values():
--> 117             x = block(x)
    118         return x
    119 

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in __call__(self, *args)
    539             hook(self, args)
    540 
--> 541         out = self.forward(*args)
    542 
    543         for hook in self._forward_hooks.values():

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/block.py in forward(self, x, *args)
    916                     params = {i: j.data(ctx) for i, j in self._reg_params.items()}
    917 
--> 918                 return self.hybrid_forward(ndarray, x, *args, **params)
    919 
    920         assert isinstance(x, Symbol), \

~/anaconda3/lib/python3.7/site-packages/mxnet/gluon/nn/basic_layers.py in hybrid_forward(self, F, x, weight, bias)
    219     def hybrid_forward(self, F, x, weight, bias=None):
    220         act = F.FullyConnected(x, weight, bias, no_bias=bias is None, num_hidden=self._units,
--> 221                                flatten=self._flatten, name='fwd')
    222         if self.act is not None:
    223             act = self.act(act)

~/anaconda3/lib/python3.7/site-packages/mxnet/ndarray/register.py in FullyConnected(data, weight, bias, num_hidden, no_bias, flatten, out, name, **kwargs)

~/anaconda3/lib/python3.7/site-packages/mxnet/_ctypes/ndarray.py in _imperative_invoke(handle, ndargs, keys, vals, out)
     90         c_str_array(keys),
     91         c_str_array([str(s) for s in vals]),
---> 92         ctypes.byref(out_stypes)))
     93 
     94     if original_output is not None:

~/anaconda3/lib/python3.7/site-packages/mxnet/base.py in check_call(ret)
    250     """
    251     if ret != 0:
--> 252         raise MXNetError(py_str(_LIB.MXGetLastError()))
    253 
    254 

MXNetError: Shape inconsistent, Provided = [4096,9216], inferred shape=(4096,2976000)

When I resize the image to 250,250, inference succeeded. And 227,227 (alexnet’s original input size) also succeeded.

Maybe an input this large cannot be handled by alexnet (at least with my machine’s version).

It’s very interesting that @feevos’s machine can handle this. :smile:
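For anyone landing here later, a minimal sketch of that resize workaround, reusing the preprocessing from the first post ('img.jpg' is a placeholder path; 224,224 is the size the pretrained model-zoo weights were trained with, and if my arithmetic is right 227 and 250 both shrink to the same 6×6 conv feature map, which is presumably why they worked too):

import cv2
import numpy as np
import mxnet as mx
from mxnet.gluon.model_zoo import vision

ctx = mx.gpu(6)  # or mx.cpu()
net = vision.alexnet(pretrained=True, ctx=ctx)

im = cv2.imread('img.jpg')                       # placeholder path; BGR uint8
im = cv2.resize(im, (224, 224))                  # resize *before* normalizing
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB).astype(np.float32) / 255
im = (im - np.array([0.485, 0.456, 0.406])) / np.array([0.229, 0.224, 0.225])
im = np.transpose(im, (2, 0, 1))[None, :]        # (1, 3, 224, 224)
r = net(mx.nd.array(im, ctx=ctx))                # (1, 1000) class scores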


Hi @Pilhoon_Jang,

I just tried your code; the problem comes when the model is loaded with pretrained=True. So there is definitely something going on with the size of the input. I do not know why, and I am sorry I cannot be of much help.
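That pretrained=True detail does line up with the error message: the model-zoo AlexNet’s first fully connected layer expects 9216 = 256×6×6 inputs, the conv feature-map size for a 224×224 image, so the pretrained weight is fixed at (4096, 9216), while a 4032×3024 input flattens to the 2,976,000 features reported in "inferred shape=(4096,2976000)". With pretrained=False, Gluon defers the Dense weight shapes to the first forward pass, so any input size goes through (at the cost of a huge first FC weight for an image that large), and resnet18_v2 ends in global average pooling, which would explain why it is insensitive to input size either way. A minimal sketch of the difference, using a modest 320×320 input in place of the full-size image (features[9] is the first Dense layer in the model-zoo definition, and the exact shapes below are my own calculation, not from the thread):

import mxnet as mx
from mxnet.gluon.model_zoo import vision

im_odd = mx.nd.random.uniform(shape=[1, 3, 320, 320])  # stand-in for "not 224x224"
im_224 = mx.nd.random.uniform(shape=[1, 3, 224, 224])

# pretrained=False: the Dense weights stay uninitialized until the first
# forward pass, so their input dimension is inferred from the conv output
# and any input size goes through.
net_scratch = vision.alexnet(pretrained=False)
net_scratch.initialize()
net_scratch(im_odd)                          # runs
print(net_scratch.features[9].weight.shape)  # first Dense layer: (4096, 20736), not (4096, 9216)

# pretrained=True: the first Dense weight is loaded as (4096, 9216) = 256*6*6,
# which only matches a 224x224 (or equivalent) input.
net_pre = vision.alexnet(pretrained=True)
net_pre(im_224)    # runs
# net_pre(im_odd)  # MXNetError: Shape inconsistent, Provided = [4096,9216], ...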

@feevos Wow. That was the problem. I’m very thankful for your advice!
