Get HybridBlock layer shape on runtime

Thank you very much @safrooze. I'm not sure yet whether it will help in my situation (I certainly hope it can!).
I've tested what you suggested, like this, and it works:

from mxnet import gluon
from mxnet.gluon import  Block
from mxnet.ndarray import NDArray

# This is a simple wrapper for Conv2D + BatchNorm 
from phaino.nn.layers.conv2Dnormed import *


class PSP_Pooling(Block):

    """Pyramid Scene Parsing pooling module, as defined in Zhao et al. 2017
    (https://arxiv.org/abs/1612.01105). This is only the pyramid pooling part.

    Max-pools the input at four pyramid scales, upsamples each result back to
    the input resolution, concatenates all branches with the input along the
    channel axis, and fuses them with a 1x1 convolution.

    INPUT:
        layer of size Nbatch, Nchannel, H, W
    OUTPUT:
        layer of size Nbatch, Nchannel, H, W.
    """

    def __init__(self, _nfilters, _norm_type = 'BatchNorm', **kwards):
        # Plain Block (not HybridBlock) because forward() reads _input.shape
        # at runtime, which symbolic hybridization cannot provide.
        Block.__init__(self,**kwards)

        # Number of output channels; each of the 4 pyramid branches
        # produces nfilters//4 channels.
        self.nfilters = _nfilters

        # This is used as a container (list) of layers
        self.convs = gluon.nn.HybridSequential()
        with self.name_scope():

            self.convs.add(Conv2DNormed(self.nfilters//4,kernel_size=(3,3),padding=(1,1), prefix="_conv1_"))
            self.convs.add(Conv2DNormed(self.nfilters//4,kernel_size=(3,3),padding=(1,1), prefix="_conv2_"))
            self.convs.add(Conv2DNormed(self.nfilters//4,kernel_size=(3,3),padding=(1,1), prefix="_conv3_"))
            self.convs.add(Conv2DNormed(self.nfilters//4,kernel_size=(3,3),padding=(1,1), prefix="_conv4_"))

        # 1x1 conv + norm that fuses the concatenated (input + 4 branches)
        # channels back down to nfilters.
        self.conv_norm_final = Conv2DNormed(channels = self.nfilters,
                                            kernel_size=(1,1),
                                            padding=(0,0),
                                            _norm_type=_norm_type)



    def forward(self,_input):
        """Apply pyramid pooling to `_input` (NCHW NDArray); returns an NDArray
        with the same spatial size and `nfilters` channels."""
        # Runtime shape access — this is exactly what prevents making this a
        # HybridBlock.
        layer_size = _input.shape[2]

        p = [_input]
        for i in range(4):

            pool_size = layer_size // (2**i) # Need this to be integer
            # NOTE(review): `nd` is not imported in this snippet (only NDArray
            # is) — assumes `from mxnet import nd` elsewhere; confirm.
            # Pool at the current pyramid scale, then nearest-neighbour
            # upsample back to the input resolution. Assumes layer_size is
            # divisible by 2**i so the round-trip restores H x W — TODO confirm.
            x = nd.Pooling(_input,kernel=[pool_size,pool_size],stride=[pool_size,pool_size],pool_type='max')
            x = nd.UpSampling(x,sample_type='nearest',scale=pool_size)
            x = self.convs[i](x)
            p += [x]

        # Concatenate the original input with the four pyramid branches
        # along the channel axis.
        out = nd.concat(p[0],p[1],p[2],p[3],p[4],dim=1)

        out = self.conv_norm_final(out)

        return out


# Minimal sequential example: a plain Conv2D followed by the PSP pooling block.
nfilters = 32

net = gluon.nn.Sequential()
with net.name_scope():
    net.add(gluon.nn.Conv2D(nfilters,kernel_size=(3,3),padding=(1,1)))
    net.add(PSP_Pooling(nfilters))

# NOTE(review): `mx` is not imported in this snippet — assumes
# `import mxnet as mx` elsewhere; confirm.
net.initialize(mx.initializer.Xavier())
# hybridize() succeeds on the Sequential container, but the imperative
# PSP_Pooling child simply stays un-hybridized.
net.hybridize()

However, for my needs I must use the PSP_Pooling layer inside another network that is not sequential in nature: for semantic segmentation it follows the encoder-decoder paradigm, where earlier layers are reused through addition and/or concatenation (skip connections). There I am getting errors. For example, this fails:

    
#This doesn't work 
class CustomNet (HybridBlock):

    """Failing variant: a HybridBlock may only register HybridBlock children,
    so assigning the Block-based PSP_Pooling raises ValueError inside
    __setattr__ / register_child during __init__ — before hybrid_forward is
    ever called.
    """

    def __init__(self,nfilters,**kwards):
        HybridBlock.__init__(self,**kwards)
        
        
        
        with self.name_scope():
            
            self.conv1 = Conv2DNormed(nfilters,kernel_size=3,padding=1)
            # This attribute assignment triggers register_child() and the
            # "Children of HybridBlock must also be HybridBlock" ValueError.
            self.psp = PSP_Pooling(nfilters)
            
    def hybrid_forward(self,F,x):
        
        out1 = self.conv1(x)
        out1 = F.relu(out1)
        out1 = self.psp(out1)
        
        # Need to combine layers within the network
        # Simple example: addition, can be addition and/or concatenation
        out1 = out1+x
        
        
        return out1

with the following error:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-18-cf7dc25a350f> in <module>()
      1 nfilters  = 32
----> 2 net = CustomNet(nfilters)

<ipython-input-17-8795e81219a2> in __init__(self, nfilters, **kwards)
    109 
    110             self.conv1 = Conv2DNormed(nfilters,kernel_size=3,padding=1)
--> 111             self.psp = PSP_Pooling(nfilters)
    112 
    113     def hybrid_forward(self,F,x):

~/anaconda3/lib/python3.6/site-packages/mxnet/gluon/block.py in __setattr__(self, name, value)
    404     def __setattr__(self, name, value):
    405         """Registers parameters."""
--> 406         super(HybridBlock, self).__setattr__(name, value)
    407         if isinstance(value, HybridBlock):
    408             self._clear_cached_op()

~/anaconda3/lib/python3.6/site-packages/mxnet/gluon/block.py in __setattr__(self, name, value)
    197                 self.register_child(value)
    198         elif isinstance(value, Block):
--> 199             self.register_child(value)
    200 
    201         super(Block, self).__setattr__(name, value)

~/anaconda3/lib/python3.6/site-packages/mxnet/gluon/block.py in register_child(self, block)
    491                 "but %s has type %s. If you are using Sequential, " \
    492                 "please try HybridSequential instead"%(
--> 493                     str(block), str(type(block))))
    494         super(HybridBlock, self).register_child(block)
    495         self._clear_cached_op()

ValueError: Children of HybridBlock must also be HybridBlock, but PSP_Pooling(
  (convs): HybridSequential(
    (0): Conv2DNormed(
      (conv2d): Conv2D(None -> 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (norm_layer): BatchNorm(axis=1, eps=1e-05, momentum=0.9, fix_gamma=False, use_global_stats=False, in_channels=None)
    )
    (1): Conv2DNormed(
      (conv2d): Conv2D(None -> 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (norm_layer): BatchNorm(axis=1, eps=1e-05, momentum=0.9, fix_gamma=False, use_global_stats=False, in_channels=None)
    )
    (2): Conv2DNormed(
      (conv2d): Conv2D(None -> 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (norm_layer): BatchNorm(axis=1, eps=1e-05, momentum=0.9, fix_gamma=False, use_global_stats=False, in_channels=None)
    )
    (3): Conv2DNormed(
      (conv2d): Conv2D(None -> 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (norm_layer): BatchNorm(axis=1, eps=1e-05, momentum=0.9, fix_gamma=False, use_global_stats=False, in_channels=None)
    )
  )
  (conv_norm_final): Conv2DNormed(
    (conv2d): Conv2D(None -> 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
    (norm_layer): BatchNorm(axis=1, eps=1e-05, momentum=0.9, fix_gamma=False, use_global_stats=False, in_channels=None)
  )
) has type <class '__main__.PSP_Pooling'>. If you are using Sequential, please try HybridSequential instead

but this works:

class CustomNet (Block):

    """Working variant: both this network and PSP_Pooling derive from the
    plain (imperative) Block, so registering the child succeeds and the
    skip connection is computed eagerly in forward().
    """

    def __init__(self,nfilters,**kwards):
        Block.__init__(self,**kwards)

        with self.name_scope():
            self.conv1 = Conv2DNormed(nfilters,kernel_size=3,padding=1)
            self.psp = PSP_Pooling(nfilters)

    def forward(self,x):
        # Convolve, activate, pyramid-pool, then add the input back in
        # (a simple additive skip connection).
        features = self.conv1(x)
        features = nd.relu(features)
        features = self.psp(features)
        return features + x

Any ideas if I can make it to work for my case? Thank you very much for all the help.