Reshape not working inside Gluon hybrid_forward

I am running the code below in a Jupyter notebook:

import mxnet as mx
from mxnet.gluon import nn

class Block(nn.HybridBlock):
    def __init__(self):
        super(Block, self).__init__()
        self.base_conv = nn.Conv2D(16, (3, 3), activation='relu')
        self.conv1 = nn.Conv2D(16, (3, 3), activation='relu')

    def hybrid_forward(self, F, x):
        convolved = self.base_conv(x)                   # (batch, 16, 30, 30)
        reshaped = convolved.reshape((-1, 64, 15, 15))  # same element count
        return self.conv1(reshaped)

image = mx.nd.random.uniform(shape=(3, 32, 32))  # stand-in for my input image

block = Block()
block.initialize()
block(image.expand_dims(0))  # add the batch dimension -> (1, 3, 32, 32)

My actual image has shape (3, 32, 32); the random array above just stands in for it.
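
The shape coming out of the first convolution can be checked in isolation (a quick sanity check using the stand-in image above; `probe` is just a throwaway layer):

probe = nn.Conv2D(16, (3, 3), activation='relu')
probe.initialize()
print(probe(image.expand_dims(0)).shape)  # (1, 16, 30, 30)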

Running the block throws this error:

---------------------------------------------------------------------------
MXNetError                                Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/IPython/core/formatters.py in __call__(self, obj)
    697                 type_pprinters=self.type_printers,
    698                 deferred_pprinters=self.deferred_printers)
--> 699             printer.pretty(obj)
    700             printer.flush()
    701             return stream.getvalue()

5 frames
/usr/local/lib/python3.6/dist-packages/mxnet/base.py in check_call(ret)
    251     """
    252     if ret != 0:
--> 253         raise MXNetError(py_str(_LIB.MXGetLastError()))
    254 
    255 

MXNetError: [08:28:47] src/ndarray/ndarray.cc:634: Check failed: !is_view: 
Stack trace:
  [bt] (0) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(+0x4a37ab) [0x7f738de237ab]
  [bt] (1) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(mxnet::NDArray::GetMKLDNNData() const+0x13a) [0x7f73901e1c3a]
  [bt] (2) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(mxnet::op::OpSignature::AddSign(mxnet::NDArray const&)+0xd8) [0x7f738de50f38]
  [bt] (3) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(mxnet::op::GetConvFwd(mxnet::op::ConvolutionParam const&, bool, mxnet::NDArray const&, mxnet::NDArray const&, mxnet::NDArray const*, mxnet::NDArray const&)+0xfe) [0x7f738de798ee]
  [bt] (4) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(mxnet::op::MKLDNNConvolutionForward(nnvm::NodeAttrs const&, mxnet::OpContext const&, std::vector<mxnet::NDArray, std::allocator<mxnet::NDArray> > const&, std::vector<mxnet::OpReqType, std::allocator<mxnet::OpReqType> > const&, std::vector<mxnet::NDArray, std::allocator<mxnet::NDArray> > const&)+0x417) [0x7f738de7a877]
  [bt] (5) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(+0x99009a) [0x7f738e31009a]
  [bt] (6) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(std::_Function_handler<void (mxnet::RunContext), mxnet::imperative::PushFComputeEx(std::function<void (nnvm::NodeAttrs const&, mxnet::OpContext const&, std::vector<mxnet::NDArray, std::allocator<mxnet::NDArray> > const&, std::vector<mxnet::OpReqType, std::allocator<mxnet::OpReqType> > const&, std::vector<mxnet::NDArray, std::allocator<mxnet::NDArray> > const&)> const&, nnvm::Op const*, nnvm::NodeAttrs const&, mxnet::Context const&, std::vector<mxnet::engine::Var*, std::allocator<mxnet::engine::Var*> > const&, std::vector<mxnet::engine::Var*, std::allocator<mxnet::engine::Var*> > const&, std::vector<mxnet::Resource, std::allocator<mxnet::Resource> > const&, std::vector<mxnet::NDArray*, std::allocator<mxnet::NDArray*> > const&, std::vector<mxnet::NDArray*, std::allocator<mxnet::NDArray*> > const&, std::vector<mxnet::OpReqType, std::allocator<mxnet::OpReqType> > const&)::{lambda(mxnet::RunContext)#1}>::_M_invoke(std::_Any_data const&, mxnet::RunContext)+0x12a) [0x7f7390074c9a]
  [bt] (7) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(+0x2652164) [0x7f738ffd2164]
  [bt] (8) /usr/local/lib/python3.6/dist-packages/mxnet/libmxnet.so(+0x265fa71) [0x7f738ffdfa71]

I tried using HybridLambda for the reshape instead, as below:

class Block(nn.HybridBlock):
    def __init__(self):
        super(Block, self).__init__()
        self.base_conv = nn.Conv2D(16, (3, 3), activation='relu')
        self.conv1 = nn.Conv2D(16, (3, 3), activation='relu')
        # wrap the reshape in a HybridLambda instead of calling it inline
        self.reshape = nn.HybridLambda(lambda F, x: x.reshape((-1, 64, 15, 15)))

    def hybrid_forward(self, F, x):
        convolved = self.base_conv(x)
        reshaped = self.reshape(convolved)
        return self.conv1(reshaped)

block = Block()
block.initialize()
block(image.expand_dims(0))

and it gave the same error.

I made sure that the total size before the reshape is the same as after it.
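
Concretely, this is the arithmetic behind that check (16 channels of 30x30 before the reshape, 64 channels of 15x15 after):

before = 16 * 30 * 30  # elements per sample after base_conv
after = 64 * 15 * 15   # elements per sample after the reshape
print(before, after, before == after)  # 14400 14400 True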