_init_bias() is not invoked

I have the following code snippet:

from mxnet import nd, init
from mxnet.gluon import nn

class MyInit(init.Initializer):
    def __init__(self):
        super(MyInit, self).__init__()
        self._verbose = True

    def _init_weight(self, _, arr):
        print('init weight', arr.shape)
        nd.random.uniform(low=5, high=10, out=arr)

    def _init_bias(self, _, arr):
        print('init bias', arr.shape)
        arr[:] = 2


net = nn.Sequential()
net.add(nn.Dense(20), nn.Dense(10))

params = net.collect_params()
params.initialize(init=MyInit(), force_reinit=True)

data = nd.random.uniform(low=5, high=10, shape=(4, 6))
net(data)

However, when I execute this code, I get the following output:
init weight (20, 6)
init weight (10, 20)

_init_bias() is never invoked, and the values of net[1].bias.data() stay at zero:
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]

Could anyone help explain why this happens?
Much appreciated.

_init_bias does not work in Gluon: because Gluon attaches the initializer to each Parameter directly, the name-based dispatch that would route *_bias parameters to _init_bias is bypassed, and _init_weight is called for every parameter, weights and biases alike. You can do the following instead:

from mxnet import nd, init
from mxnet.gluon import nn


class MyInit(init.Initializer):
    def _init_weight(self, _, arr):
        nd.random.uniform(low=5, high=10, out=arr)


class MyInitBias(init.Initializer):
    # Gluon routes every parameter through _init_weight, so the bias
    # values are set here even though this initializer targets biases.
    def _init_weight(self, _, arr):
        arr[:] = 2


net = nn.Sequential()
net.add(nn.Dense(20), nn.Dense(10))

biases = net.collect_params(select=".*bias.*")
for name in biases:
    biases[name].initialize(MyInitBias())

weights = net.collect_params(select=".*weight.*")
for name in weights:
    weights[name].initialize(MyInit())

data = nd.random.uniform(low=5, high=10, shape=(4, 6))
net(data)
print(biases["dense1_bias"].list_data())
print(weights["dense1_weight"].list_data())

This initializes the biases and weights separately.
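
If you would rather keep a single initializer, here is a minimal alternative sketch (MyCombinedInit is just an illustrative name, and it assumes the parameters keep Gluon's default *_weight / *_bias naming): since Gluon passes the parameter name as the first argument to _init_weight, you can branch on it there:

from mxnet import nd, init
from mxnet.gluon import nn


class MyCombinedInit(init.Initializer):
    # Gluon calls _init_weight for every parameter and passes the
    # parameter name as the first argument, so dispatch on the suffix.
    # (MyCombinedInit is an illustrative name, not part of the answer above.)
    def _init_weight(self, name, arr):
        if str(name).endswith('bias'):
            arr[:] = 2
        else:
            nd.random.uniform(low=5, high=10, out=arr)


net = nn.Sequential()
net.add(nn.Dense(20), nn.Dense(10))
net.collect_params().initialize(init=MyCombinedInit(), force_reinit=True)

net(nd.random.uniform(low=5, high=10, shape=(4, 6)))
print(net[1].bias.data())  # biases should now be 2s instead of 0s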

Hi, thank you very much for the reply. I get it now.