# @Time : 2021/8/4 19:32
# @Author : Li Kunlun
# @Description : Accessing, initializing, and sharing model parameters

from mxnet import init, nd
from mxnet.gluon import nn

# Build a two-layer MLP. Gluon defers parameter allocation until the first
# forward pass, so we run one batch through the net to materialize weights.
net = nn.Sequential()
net.add(nn.Dense(256, activation='relu'),
        nn.Dense(10))
net.initialize()  # default initialization scheme

X = nd.random.uniform(shape=(2, 20))
Y = net(X)  # forward pass triggers shape inference and allocation

# 1. Accessing model parameters

# A layer's params attribute is a ParameterDict. Expected output:
#   dense0_ (
#     Parameter dense0_weight (shape=(256, 20), dtype=float32)
#     Parameter dense0_bias (shape=(256,), dtype=float32)
#   ) <class 'mxnet.gluon.parameter.ParameterDict'>
print(net[0].params, type(net[0].params))

# Dict-style lookup and the weight attribute refer to the same Parameter:
#   Parameter dense0_weight (shape=(256, 20), dtype=float32)
#   Parameter dense0_weight (shape=(256, 20), dtype=float32)
print(net[0].params['dense0_weight'], net[0].weight)

# Current weight values:  <NDArray 256x20 @cpu(0)>
print(net[0].weight.data())

# Gradient buffer, same shape as the weight:  <NDArray 256x20 @cpu(0)>
print(net[0].weight.grad())

# collect_params gathers every parameter of all nested layers (e.g. layers
# nested via add). Expected output:
#   sequential0_ (
#     Parameter dense0_weight (shape=(256, 20), dtype=float32)
#     Parameter dense0_bias (shape=(256,), dtype=float32)
#     Parameter dense1_weight (shape=(10, 256), dtype=float32)
#     Parameter dense1_bias (shape=(10,), dtype=float32)
#   )
print(net.collect_params())

# collect_params also accepts a regex to filter parameter names:
#   sequential0_ (
#     Parameter dense0_weight (shape=(256, 20), dtype=float32)
#     Parameter dense1_weight (shape=(10, 256), dtype=float32)
#   )
print(net.collect_params('.*weight'))

# 2. Initializing model parameters
# Re-initializing an already-initialized model requires force_reinit=True.

# First re-draw the weights from N(0, 0.01^2), then overwrite them with the
# constant 1, printing the first row of the first layer's weight after each.
for initializer in (init.Normal(sigma=0.01), init.Constant(1)):
    net.initialize(init=initializer, force_reinit=True)
    print(net[0].weight.data()[0])

# A single Parameter can also be re-initialized on its own (Xavier scheme).
net[0].weight.initialize(init=init.Xavier(), force_reinit=True)
print(net[0].weight.data()[0])


# 3、自定义初始化方法
class MyInit(init.Initializer):
    """Custom initializer: draw weights uniformly from [-10, 10), then zero
    out every entry whose absolute value is below 5."""

    def _init_weight(self, name, data):
        print('Init', name, data.shape)
        drawn = nd.random.uniform(low=-10, high=10, shape=data.shape)
        # Keep only entries with |value| >= 5; all others become 0.
        data[:] = drawn * (drawn.abs() >= 5)


# Apply the custom initializer to every parameter of the network; each
# _init_weight call prints "Init <name> <shape>" as a side effect.
net.initialize(MyInit(), force_reinit=True)
print(net[0].weight.data()[0])

# 3. A Parameter can also be overwritten directly via set_data. Here every
# entry of the first layer's weight is shifted by +1, so values that were
# zeroed by MyInit become exactly 1 in the printed row.
shifted = net[0].weight.data() + 1
net[0].weight.set_data(shifted)
print(net[0].weight.data()[0])

# 4. Sharing model parameters
# The second and third hidden layers share one set of parameters: the third
# Dense layer is constructed with params=shared.params, so both layers point
# at the very same Parameter objects (updates to one affect the other).
net = nn.Sequential()
shared = nn.Dense(8, activation='relu')
net.add(nn.Dense(8, activation='relu'))
net.add(shared)
net.add(nn.Dense(8, activation='relu', params=shared.params))
net.add(nn.Dense(10))
net.initialize()

X = nd.random.uniform(shape=(2, 20))
net(X)

# Element-wise equality of the two layers' first weight rows prints all
# ones, confirming they hold identical parameters:
#   [ 1.  1.  1.  1.  1.  1.  1.  1.]
#   <NDArray 8 @cpu(0)>
print(net[1].weight.data()[0] == net[2].weight.data()[0])
