import tensorflow as tf
from tensorflow import keras
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split

# Load the California housing data and hold out 20% as a test set.
dataset = fetch_california_housing()
train_x, test_x, train_y, test_y = train_test_split(
    dataset.data,
    dataset.target,
    test_size=0.2,
    random_state=42,
)

# Custom layers:
# 1. The subclassing approach
class CustomizedDense(keras.layers.Layer):
    """A fully connected layer implemented by subclassing `keras.layers.Layer`.

    Computes ``activation(x @ kernel + bias)``, mirroring ``keras.layers.Dense``.
    """

    def __init__(self, units, activation=None, **kwargs):
        """Store the layer configuration.

        Args:
            units: Number of output neurons (output dimension), like the
                `units` argument of `keras.layers.Dense`.
            activation: Activation name or callable; None means linear.
            **kwargs: Extra keyword arguments forwarded to the base Layer
                (e.g. `name`, `input_shape`).
        """
        # BUG FIX: call the parent constructor FIRST. tf.keras layers track
        # attribute assignments, and setting attributes before
        # super().__init__() can raise at construction time.
        super(CustomizedDense, self).__init__(**kwargs)
        self.units = units
        self.activation = keras.layers.Activation(activation)

    def build(self, input_shape):
        """Create the trainable weights once the input shape is known.

        x [None, in_dim] @ kernel [in_dim, units] + bias [units,]
            -> y [None, units]
        """
        # input_shape[-1] (rather than [1]) also supports inputs of rank > 2.
        self.kernel = self.add_weight(
            name='kernel',
            shape=(input_shape[-1], self.units),
            initializer='uniform',
            trainable=True,
        )
        self.bias = self.add_weight(
            name='bias',
            shape=(self.units,),
            initializer='zeros',
            trainable=True,
        )
        super(CustomizedDense, self).build(input_shape)

    def call(self, x):
        """Forward pass: affine transform followed by the activation."""
        return self.activation(x @ self.kernel + self.bias)

# Custom layers:
# 2. When the layer takes no parameters, wrap a function in a Lambda layer.
# Maps input x to output tf.nn.selu(x).
CustomizedSelu = keras.layers.Lambda(tf.nn.selu)

# Assemble the model from the custom layers defined above.
model = keras.Sequential([
    CustomizedDense(16, activation='relu', input_shape=train_x.shape[1:]),
    CustomizedDense(1),
    CustomizedSelu,  # final activation applied via the Lambda layer
])

model.compile(
    optimizer=keras.optimizers.Adam(0.001),
    loss='mse',  # regression target -> mean squared error
)

# BUG FIX: the held-out split from train_test_split was never used;
# pass it as validation data so each epoch reports held-out loss.
model.fit(train_x, train_y, validation_data=(test_x, test_y), epochs=10)

