module agentzoo
export PolicyNet1, PolicyNet2, PolicyNet3, PolicyNet4, PolicyNet5, PolicyNet6, PolicyNet7, PolicyNet8
using Lux,NNlib,Random
# PolicyNet1: two parallel Dense branches (one with elu) summed, then a Dense head.
struct PolicyNet1 <: Lux.AbstractLuxLayer
    layer1::Dense
    layer2::Dense
    layer3::Dense
end

# Build a PolicyNet1 with the given input/hidden/output widths.
# `layer1` carries the elu non-linearity; `layer2` is a parallel linear branch.
function PolicyNet1(input_size::Int, hidden_size::Int, output_size::Int)
    return PolicyNet1(
        Dense(input_size, hidden_size, NNlib.elu),
        Dense(input_size, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet1)
    return (layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet1)
    return (layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3))
end

# Forward pass: the two branch outputs are summed (not concatenated) and fed
# to the final Dense layer. Returns (output, updated_states).
function (m::PolicyNet1)(x::AbstractArray, ps, st)
    a, st1 = m.layer1(x, ps.layer1, st.layer1)
    b, st2 = m.layer2(x, ps.layer2, st.layer2)
    out, st3 = m.layer3(a + b, ps.layer3, st.layer3)
    return out, (layer1 = st1, layer2 = st2, layer3 = st3)
end
# PolicyNet2: BatchNorm on the input, then the PolicyNet1 topology
# (two parallel Dense branches summed, Dense head).
struct PolicyNet2 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
end

# Build a PolicyNet2 with the given input/hidden/output widths.
function PolicyNet2(input_size::Int, hidden_size::Int, output_size::Int)
    return PolicyNet2(
        BatchNorm(input_size),
        Dense(input_size, hidden_size, NNlib.elu),
        Dense(input_size, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet2)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet2)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3))
end

# Forward pass. Assumes `x` is a single flat sample; it is reshaped to a
# (features, 1) column so BatchNorm sees a batch dimension — TODO confirm
# with callers.
# NOTE(review): st.bn presumably still carries training=Val(true), so the
# normalisation uses single-sample batch statistics (the original author's
# attempt to switch it off is noted in history) — confirm this is intended.
function (m::PolicyNet2)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    # Branch outputs are summed, not concatenated.
    out, st3 = m.layer3(a + b, ps.layer3, st.layer3)
    # BN running statistics are deliberately frozen: the incoming st.bn is
    # returned unchanged instead of the updated state.
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, layer3 = st3)
end
# PolicyNet3: PolicyNet2 plus a LayerNorm between the summed branches and
# the final Dense head.
struct PolicyNet3 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    ln::LayerNorm
    layerf::Dense
end

# Build a PolicyNet3 with the given input/hidden/output widths.
function PolicyNet3(input_size::Int, hidden_size::Int, output_size::Int)
    return PolicyNet3(
        BatchNorm(input_size),
        Dense(input_size, hidden_size, NNlib.elu),
        Dense(input_size, hidden_size),
        LayerNorm((hidden_size,)),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet3)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            ln = Lux.initialparameters(rng, m.ln),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet3)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            ln = Lux.initialstates(rng, m.ln),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample (reshaped to a
# (features, 1) column for BatchNorm) — TODO confirm with callers.
function (m::PolicyNet3)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    # Sum the branches, layer-normalise, then apply the output head.
    hn, stln = m.ln(a + b, ps.ln, st.ln)
    out, stf = m.layerf(hn, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, ln = stln, layerf = stf)
end
# PolicyNet4: BatchNorm -> (Dense ⊕ Dense) -> LayerNorm -> (Dense ⊕ Dense) -> Dense.
# "⊕" denotes two parallel branches whose outputs are summed.
struct PolicyNet4 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    ln::LayerNorm
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet4 with the given input/hidden/output widths.
function PolicyNet4(input_size::Int, hidden_size::Int, output_size::Int)
    return PolicyNet4(
        BatchNorm(input_size),
        Dense(input_size, hidden_size, NNlib.elu),
        Dense(input_size, hidden_size),
        LayerNorm((hidden_size,)),
        Dense(hidden_size, hidden_size),
        Dense(hidden_size, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet4)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            ln = Lux.initialparameters(rng, m.ln),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet4)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            ln = Lux.initialstates(rng, m.ln),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample (reshaped to a
# (features, 1) column for BatchNorm) — TODO confirm with callers.
function (m::PolicyNet4)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    # First pair of parallel branches, summed.
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    # Layer-normalise the sum; both second-stage branches consume it.
    hn, stln = m.ln(a + b, ps.ln, st.ln)
    c, st3 = m.layer3(hn, ps.layer3, st.layer3)
    d, st4 = m.layer4(hn, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, ln = stln,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
# PolicyNet5: same shape as PolicyNet4, except that in the second stage only
# layer3 consumes the layer-normalised activations; layer4 consumes the raw
# (pre-LayerNorm) sum.
struct PolicyNet5 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    ln::LayerNorm
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet5 with the given input/hidden/output widths.
function PolicyNet5(input_size::Int, hidden_size::Int, output_size::Int)
    return PolicyNet5(
        BatchNorm(input_size),
        Dense(input_size, hidden_size, NNlib.elu),
        Dense(input_size, hidden_size),
        LayerNorm((hidden_size,)),
        Dense(hidden_size, hidden_size),
        Dense(hidden_size, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet5)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            ln = Lux.initialparameters(rng, m.ln),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet5)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            ln = Lux.initialstates(rng, m.ln),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample (reshaped to a
# (features, 1) column for BatchNorm) — TODO confirm with callers.
function (m::PolicyNet5)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    hn, stln = m.ln(h, ps.ln, st.ln)
    # Deliberate asymmetry: layer3 sees the normalised sum, layer4 the raw sum.
    c, st3 = m.layer3(hn, ps.layer3, st.layer3)
    d, st4 = m.layer4(h, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, ln = stln,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
# PolicyNet6: PolicyNet4 without the LayerNorm — BatchNorm, two summed-branch
# Dense stages, then the output head.
struct PolicyNet6 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet6 with the given input/hidden/output widths.
function PolicyNet6(input_size::Int, hidden_size::Int, output_size::Int)
    return PolicyNet6(
        BatchNorm(input_size),
        Dense(input_size, hidden_size, NNlib.elu),
        Dense(input_size, hidden_size),
        Dense(hidden_size, hidden_size),
        Dense(hidden_size, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet6)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet6)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample (reshaped to a
# (features, 1) column for BatchNorm) — TODO confirm with callers.
function (m::PolicyNet6)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    c, st3 = m.layer3(h, ps.layer3, st.layer3)
    d, st4 = m.layer4(h, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
# PolicyNet7: PolicyNet5 topology with a narrower first stage
# (hidden_size ÷ 4), a non-affine LayerNorm, and elu on layer3 as well.
struct PolicyNet7 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    ln::LayerNorm
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet7. The first stage is narrowed to floor(hidden_size / 4).
function PolicyNet7(input_size::Int, hidden_size::Int, output_size::Int)
    hidden_mid = floor(Int, hidden_size / 4)
    return PolicyNet7(
        BatchNorm(input_size),
        Dense(input_size, hidden_mid, NNlib.elu),
        Dense(input_size, hidden_mid),
        LayerNorm((hidden_mid,), affine=false),
        Dense(hidden_mid, hidden_size, NNlib.elu),
        Dense(hidden_mid, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet7)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            ln = Lux.initialparameters(rng, m.ln),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet7)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            ln = Lux.initialstates(rng, m.ln),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample (reshaped to a
# (features, 1) column for BatchNorm) — TODO confirm with callers.
function (m::PolicyNet7)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    hn, stln = m.ln(h, ps.ln, st.ln)
    # Deliberate asymmetry: layer3 sees the normalised sum, layer4 the raw sum.
    c, st3 = m.layer3(hn, ps.layer3, st.layer3)
    d, st4 = m.layer4(h, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, ln = stln,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
# PolicyNet8: PolicyNet7 without the LayerNorm — narrow first stage
# (hidden_size ÷ 4), elu on layer1 and layer3, both stages fully sequential.
struct PolicyNet8 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet8. The first stage is narrowed to floor(hidden_size / 4).
function PolicyNet8(input_size::Int, hidden_size::Int, output_size::Int)
    hidden_mid = floor(Int, hidden_size / 4)
    return PolicyNet8(
        BatchNorm(input_size),
        Dense(input_size, hidden_mid, NNlib.elu),
        Dense(input_size, hidden_mid),
        Dense(hidden_mid, hidden_size, NNlib.elu),
        Dense(hidden_mid, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet8)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet8)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample (reshaped to a
# (features, 1) column for BatchNorm) — TODO confirm with callers.
function (m::PolicyNet8)(x::AbstractArray, ps, st)
    xn, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    c, st3 = m.layer3(h, ps.layer3, st.layer3)
    d, st4 = m.layer4(h, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
# PolicyNet9: like PolicyNet7 but with a non-affine BatchNorm, and the
# BatchNorm output is flattened back to a vector before the Dense stages.
struct PolicyNet9 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    ln::LayerNorm
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet9. The first stage is narrowed to floor(hidden_size / 4);
# both normalisation layers are non-affine (no learned scale/shift).
function PolicyNet9(input_size::Int, hidden_size::Int, output_size::Int)
    hidden_mid = floor(Int, hidden_size / 4)
    return PolicyNet9(
        BatchNorm(input_size, affine=false),
        Dense(input_size, hidden_mid, NNlib.elu),
        Dense(input_size, hidden_mid),
        LayerNorm((hidden_mid,), affine=false),
        Dense(hidden_mid, hidden_size, NNlib.elu),
        Dense(hidden_mid, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet9)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            ln = Lux.initialparameters(rng, m.ln),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet9)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            ln = Lux.initialstates(rng, m.ln),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample — TODO confirm with
# callers. It is reshaped to a (features, 1) column for BatchNorm, then
# flattened back to a vector for the remaining layers.
function (m::PolicyNet9)(x::AbstractArray, ps, st)
    xn_mat, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    xn = vec(xn_mat)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    hn, stln = m.ln(h, ps.ln, st.ln)
    # Deliberate asymmetry: layer3 sees the normalised sum, layer4 the raw sum.
    c, st3 = m.layer3(hn, ps.layer3, st.layer3)
    d, st4 = m.layer4(h, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, ln = stln,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
export PolicyNet9
# PolicyNet10: like PolicyNet8 (non-affine BN variant), except the forward
# pass normalises manually from the stored BN running statistics instead of
# calling the BatchNorm layer.
struct PolicyNet10 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
    layer4::Dense
    layerf::Dense
end

# Build a PolicyNet10. The first stage is narrowed to floor(hidden_size / 4).
# The BatchNorm layer only serves as a container for the running statistics.
function PolicyNet10(input_size::Int, hidden_size::Int, output_size::Int)
    hidden_mid = floor(Int, hidden_size / 4)
    return PolicyNet10(
        BatchNorm(input_size, affine=false),
        Dense(input_size, hidden_mid, NNlib.elu),
        Dense(input_size, hidden_mid),
        Dense(hidden_mid, hidden_size, NNlib.elu),
        Dense(hidden_mid, hidden_size),
        Dense(hidden_size, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet10)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet10)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. A single sample cannot yield batch statistics, so the input
# is whitened manually with the running mean/variance accumulated externally
# and written into st.bn (per the original author's note).
# NOTE(review): epsilon is added *outside* the sqrt here — standard BatchNorm
# uses sqrt(var + eps). Confirm the discrepancy is intended.
function (m::PolicyNet10)(x::AbstractArray, ps, st)
    sigma = sqrt.(st.bn.running_var)
    xn = (x .- st.bn.running_mean) ./ (sigma .+ 1e-5)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    c, st3 = m.layer3(h, ps.layer3, st.layer3)
    d, st4 = m.layer4(h, ps.layer4, st.layer4)
    out, stf = m.layerf(c + d, ps.layerf, st.layerf)
    # The BN state is passed through untouched.
    return out, (bn = st.bn, layer1 = st1, layer2 = st2,
                 layer3 = st3, layer4 = st4, layerf = stf)
end
export PolicyNet10
# PolicyNet11: three summed-branch Dense stages with geometric-mean widths
# (sqrt(hidden*input) and sqrt(hidden)), a non-affine BatchNorm front end,
# and a non-affine LayerNorm before the last stage.
struct PolicyNet11 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
    layer4::Dense
    ln::LayerNorm
    layer5::Dense
    layer6::Dense
    layerf::Dense
end

# Build a PolicyNet11 with geometric-mean intermediate widths.
function PolicyNet11(input_size::Int, hidden_size::Int, output_size::Int)
    mid1 = floor(Int, sqrt(hidden_size * input_size))
    mid2 = floor(Int, sqrt(hidden_size))
    return PolicyNet11(
        BatchNorm(input_size, affine=false),
        Dense(input_size, mid1, NNlib.elu),
        Dense(input_size, mid1),
        Dense(mid1, hidden_size, NNlib.elu),
        Dense(mid1, hidden_size),
        LayerNorm((hidden_size,), affine=false),
        Dense(hidden_size, mid2, NNlib.elu),
        Dense(hidden_size, mid2),
        Dense(mid2, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet11)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            ln = Lux.initialparameters(rng, m.ln),
            layer5 = Lux.initialparameters(rng, m.layer5),
            layer6 = Lux.initialparameters(rng, m.layer6),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet11)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            ln = Lux.initialstates(rng, m.ln),
            layer5 = Lux.initialstates(rng, m.layer5),
            layer6 = Lux.initialstates(rng, m.layer6),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample — TODO confirm with
# callers. It is reshaped to a (features, 1) column for BatchNorm, then
# flattened back to a vector.
function (m::PolicyNet11)(x::AbstractArray, ps, st)
    xn_mat, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    xn = vec(xn_mat)
    # Stage 1: parallel branches on the normalised input.
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h1 = a + b
    # Stage 2: parallel branches on the stage-1 sum.
    c, st3 = m.layer3(h1, ps.layer3, st.layer3)
    d, st4 = m.layer4(h1, ps.layer4, st.layer4)
    h2 = c + d
    h2n, stln = m.ln(h2, ps.ln, st.ln)
    # Stage 3, deliberately asymmetric: layer5 sees the layer-normalised sum,
    # layer6 the raw sum.
    e, st5 = m.layer5(h2n, ps.layer5, st.layer5)
    f, st6 = m.layer6(h2, ps.layer6, st.layer6)
    out, stf = m.layerf(e + f, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, layer3 = st3,
                 layer4 = st4, ln = stln, layer5 = st5, layer6 = st6, layerf = stf)
end
export PolicyNet11
# PolicyNet12: PolicyNet11 without the LayerNorm — three fully sequential
# summed-branch Dense stages behind a non-affine BatchNorm.
struct PolicyNet12 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
    layer4::Dense
    layer5::Dense
    layer6::Dense
    layerf::Dense
end

# Build a PolicyNet12 with geometric-mean intermediate widths.
function PolicyNet12(input_size::Int, hidden_size::Int, output_size::Int)
    mid1 = floor(Int, sqrt(hidden_size * input_size))
    mid2 = floor(Int, sqrt(hidden_size))
    return PolicyNet12(
        BatchNorm(input_size, affine=false),
        Dense(input_size, mid1, NNlib.elu),
        Dense(input_size, mid1),
        Dense(mid1, hidden_size, NNlib.elu),
        Dense(mid1, hidden_size),
        Dense(hidden_size, mid2, NNlib.elu),
        Dense(hidden_size, mid2),
        Dense(mid2, output_size),
    )
end

# Collect the trainable parameters of every sub-layer into one NamedTuple.
function Lux.initialparameters(rng::AbstractRNG, m::PolicyNet12)
    return (bn = Lux.initialparameters(rng, m.bn),
            layer1 = Lux.initialparameters(rng, m.layer1),
            layer2 = Lux.initialparameters(rng, m.layer2),
            layer3 = Lux.initialparameters(rng, m.layer3),
            layer4 = Lux.initialparameters(rng, m.layer4),
            layer5 = Lux.initialparameters(rng, m.layer5),
            layer6 = Lux.initialparameters(rng, m.layer6),
            layerf = Lux.initialparameters(rng, m.layerf))
end

# Collect the non-trainable state of every sub-layer into one NamedTuple.
function Lux.initialstates(rng::AbstractRNG, m::PolicyNet12)
    return (bn = Lux.initialstates(rng, m.bn),
            layer1 = Lux.initialstates(rng, m.layer1),
            layer2 = Lux.initialstates(rng, m.layer2),
            layer3 = Lux.initialstates(rng, m.layer3),
            layer4 = Lux.initialstates(rng, m.layer4),
            layer5 = Lux.initialstates(rng, m.layer5),
            layer6 = Lux.initialstates(rng, m.layer6),
            layerf = Lux.initialstates(rng, m.layerf))
end

# Forward pass. Assumes `x` is a single flat sample — TODO confirm with
# callers. It is reshaped to a (features, 1) column for BatchNorm, then
# flattened back to a vector.
function (m::PolicyNet12)(x::AbstractArray, ps, st)
    xn_mat, _ = m.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    xn = vec(xn_mat)
    a, st1 = m.layer1(xn, ps.layer1, st.layer1)
    b, st2 = m.layer2(xn, ps.layer2, st.layer2)
    h1 = a + b
    c, st3 = m.layer3(h1, ps.layer3, st.layer3)
    d, st4 = m.layer4(h1, ps.layer4, st.layer4)
    h2 = c + d
    e, st5 = m.layer5(h2, ps.layer5, st.layer5)
    f, st6 = m.layer6(h2, ps.layer6, st.layer6)
    out, stf = m.layerf(e + f, ps.layerf, st.layerf)
    # BN running statistics are deliberately frozen (st.bn returned as-is).
    return out, (bn = st.bn, layer1 = st1, layer2 = st2, layer3 = st3,
                 layer4 = st4, layer5 = st5, layer6 = st6, layerf = stf)
end
export PolicyNet12
# PolicyNet13 definition
# 定义模型结构
struct PolicyNet13 <: Lux.AbstractLuxLayer
    bn::BatchNorm
    layer1::Dense
    layer2::Dense
    layer3::Dense
    layer4::Dense
    layer5::Dense
    layer6::Dense
    layerf::Dense
end

# 构造函数
function PolicyNet13(input_size::Int, hidden_size::Int, output_size::Int)
    hidden_size_med = Int(floor(sqrt(hidden_size * input_size)))
    hidden_size_med2 = Int(floor(sqrt(hidden_size * output_size)))
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.swish)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med, hidden_size, NNlib.swish)
    layer4 = Dense(hidden_size_med, hidden_size)
    layer5 = Dense(hidden_size, hidden_size_med2, NNlib.swish)
    layer6 = Dense(hidden_size, hidden_size_med2)
    layerf = Dense(hidden_size_med2, output_size)
    return PolicyNet13(bn, layer1, layer2, layer3, layer4, layer5, layer6, layerf)
end

# 初始化参数
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet13)
    bn = Lux.initialparameters(rng, model.bn)
    layer1 = Lux.initialparameters(rng, model.layer1)
    layer2 = Lux.initialparameters(rng, model.layer2)
    layer3 = Lux.initialparameters(rng, model.layer3)
    layer4 = Lux.initialparameters(rng, model.layer4)
    layer5 = Lux.initialparameters(rng, model.layer5)
    layer6 = Lux.initialparameters(rng, model.layer6)
    layerf = Lux.initialparameters(rng, model.layerf)
    return (bn=bn, layer1=layer1, layer2=layer2, layer3=layer3, layer4=layer4, layer5=layer5, layer6=layer6, layerf=layerf)
end

# 初始化状态
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet13)
    bn = Lux.initialstates(rng, model.bn)
    layer1 = Lux.initialstates(rng, model.layer1)
    layer2 = Lux.initialstates(rng, model.layer2)
    layer3 = Lux.initialstates(rng, model.layer3)
    layer4 = Lux.initialstates(rng, model.layer4)
    layer5 = Lux.initialstates(rng, model.layer5)
    layer6 = Lux.initialstates(rng, model.layer6)
    layerf = Lux.initialstates(rng, model.layerf)
    return (bn=bn, layer1=layer1, layer2=layer2, layer3=layer3, layer4=layer4, layer5=layer5, layer6=layer6, layerf=layerf)
end

# 前向传播
function (model::PolicyNet13)(x::AbstractArray, ps, st)
    # 通过批归一化层
    x = reshape(x, length(x), 1)
    x, stbn = model.bn(x, ps.bn, st.bn)
    x = reshape(x, length(x))
    st_bn = st.bn#bn不改变状态
    #
    y1, st1 = model.layer1(x, ps.layer1, st.layer1)
    y2, st2 = model.layer2(x, ps.layer2, st.layer2)
    y = y1 + y2
    #
    y1, st3 = model.layer3(y, ps.layer3, st.layer3)
    y2, st4 = model.layer4(y, ps.layer4, st.layer4)
    y = y1 + y2
    #
    y1, st5 = model.layer5(y, ps.layer5, st.layer5)
    y2, st6 = model.layer6(y, ps.layer6, st.layer6)
    y = y1 + y2
    #
    y, stf = model.layerf(y, ps.layerf, st.layerf)
    return y, (bn=st_bn, layer1=st1, layer2=st2, layer3=st3, layer4=st4, layer5=st5, layer6=st6, layerf=stf)
end
export PolicyNet13  # add PolicyNet13 to the module's public API
# PolicyNet14 definition
# Model structure: BatchNorm front-end, three pairs of parallel Dense branches
# (merged elementwise in the forward pass), a LayerNorm between the second and
# third pair, and a final Dense read-out. Field order matters: the positional
# inner constructor is called with fields in this order.
struct PolicyNet14 <: Lux.AbstractLuxLayer
    bn::BatchNorm   # input normalization (non-affine, see constructor)
    layer1::Dense   # pair 1: swish-activated branch
    layer2::Dense   # pair 1: linear branch
    layer3::Dense   # pair 2: swish-activated branch
    layer4::Dense   # pair 2: linear branch
    ln::LayerNorm   # normalizes the pair-2 output (non-affine)
    layer5::Dense   # pair 3: swish branch (fed the LayerNorm output)
    layer6::Dense   # pair 3: linear branch (fed the pre-norm output -- see forward)
    layerf::Dense   # final linear projection to output_size
end

# Constructor
# Build a PolicyNet14. Intermediate widths are geometric means of the endpoint
# sizes, giving a smooth widen/narrow profile:
#   input -> sqrt(hidden*input) -> hidden -> sqrt(hidden*output) -> output
function PolicyNet14(input_size::Int, hidden_size::Int, output_size::Int)
    # floor(Int, x) is the idiomatic (and identical) form of Int(floor(x)).
    hidden_size_med = floor(Int, sqrt(hidden_size * input_size))
    hidden_size_med2 = floor(Int, sqrt(hidden_size * output_size))
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.swish)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med, hidden_size, NNlib.swish)
    layer4 = Dense(hidden_size_med, hidden_size)
    # Non-affine LayerNorm with a larger-than-default epsilon (1f-3).
    ln = LayerNorm((hidden_size,), affine=false, epsilon=1f-3)
    layer5 = Dense(hidden_size, hidden_size_med2, NNlib.swish)
    layer6 = Dense(hidden_size, hidden_size_med2)
    layerf = Dense(hidden_size_med2, output_size)
    return PolicyNet14(bn, layer1, layer2, layer3, layer4, ln, layer5, layer6, layerf)
end

# Initialize parameters by delegating to each sublayer, in field order so the
# RNG stream matches the original sequential implementation.
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet14)
    return (
        bn     = Lux.initialparameters(rng, model.bn),
        layer1 = Lux.initialparameters(rng, model.layer1),
        layer2 = Lux.initialparameters(rng, model.layer2),
        layer3 = Lux.initialparameters(rng, model.layer3),
        layer4 = Lux.initialparameters(rng, model.layer4),
        ln     = Lux.initialparameters(rng, model.ln),
        layer5 = Lux.initialparameters(rng, model.layer5),
        layer6 = Lux.initialparameters(rng, model.layer6),
        layerf = Lux.initialparameters(rng, model.layerf),
    )
end

# Initialize states: one state NamedTuple per sublayer, keyed by field name,
# visited in field order (RNG stream preserved).
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet14)
    return (
        bn     = Lux.initialstates(rng, model.bn),
        layer1 = Lux.initialstates(rng, model.layer1),
        layer2 = Lux.initialstates(rng, model.layer2),
        layer3 = Lux.initialstates(rng, model.layer3),
        layer4 = Lux.initialstates(rng, model.layer4),
        ln     = Lux.initialstates(rng, model.ln),
        layer5 = Lux.initialstates(rng, model.layer5),
        layer6 = Lux.initialstates(rng, model.layer6),
        layerf = Lux.initialstates(rng, model.layerf),
    )
end

# Forward pass: BatchNorm, two sum-merged dense pairs, LayerNorm, a third
# dense pair, then a final linear projection.
function (model::PolicyNet14)(x::AbstractArray, ps, st)
    # BatchNorm expects (features, batch); run the vector as a 1-sample batch
    # and flatten it back afterwards.
    x = reshape(x, length(x), 1)
    x, stbn = model.bn(x, ps.bn, st.bn)
    x = reshape(x, length(x))
    st_bn = st.bn # the updated BN state is deliberately discarded; the incoming st.bn is returned unchanged (original note: "BN does not change state")
    # Pair 1: two parallel dense branches merged by addition.
    y1, st1 = model.layer1(x, ps.layer1, st.layer1)
    y2, st2 = model.layer2(x, ps.layer2, st.layer2)
    y = y1 + y2
    # Pair 2
    y1, st3 = model.layer3(y, ps.layer3, st.layer3)
    y2, st4 = model.layer4(y, ps.layer4, st.layer4)
    y = y1 + y2
    # LayerNorm over the pair-2 output.
    lny, stln = model.ln(y, ps.ln, st.ln)
    # Pair 3.
    # NOTE(review): layer5 consumes the LayerNorm output (lny) while layer6
    # consumes the pre-norm activation (y). Every sibling network in this file
    # feeds both branches the same input, so this asymmetry may be intentional
    # (a skip path around the LayerNorm) or a copy-paste slip -- confirm
    # before relying on it.
    y1, st5 = model.layer5(lny, ps.layer5, st.layer5)
    y2, st6 = model.layer6(y, ps.layer6, st.layer6)
    y = y1 + y2
    # Final projection
    y, stf = model.layerf(y, ps.layerf, st.layerf)
    return y, (bn=st_bn, layer1=st1, layer2=st2, layer3=st3, layer4=st4, ln=stln, layer5=st5, layer6=st6, layerf=stf)
end
export PolicyNet14  # add PolicyNet14 to the module's public API
# PolicyNet15 definition
# Model structure: BatchNorm front-end, three sum-merged pairs of Dense
# branches (swish-activated + linear), then a final Dense read-out.
# Field order matters: the positional constructor is called in this order.
struct PolicyNet15 <: Lux.AbstractLuxLayer
    bn::BatchNorm   # input normalization (non-affine)
    layer1::Dense   # pair 1: swish branch
    layer2::Dense   # pair 1: linear branch
    layer3::Dense   # pair 2: swish branch
    layer4::Dense   # pair 2: linear branch
    layer5::Dense   # pair 3: swish branch
    layer6::Dense   # pair 3: linear branch
    layerf::Dense   # final linear projection
end

# Constructor
# Build a PolicyNet15. Intermediate widths are arithmetic means of the
# endpoint sizes:
#   input -> (hidden+input)/2 -> hidden -> (hidden+output)/2 -> output
function PolicyNet15(input_size::Int, hidden_size::Int, output_size::Int)
    # Integer division is the idiomatic (and identical) form of
    # Int(floor((a + b) / 2.0)) for non-negative Ints.
    hidden_size_med = (hidden_size + input_size) ÷ 2
    hidden_size_med2 = (hidden_size + output_size) ÷ 2
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.swish)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med, hidden_size, NNlib.swish)
    layer4 = Dense(hidden_size_med, hidden_size)
    layer5 = Dense(hidden_size, hidden_size_med2, NNlib.swish)
    layer6 = Dense(hidden_size, hidden_size_med2)
    layerf = Dense(hidden_size_med2, output_size)
    return PolicyNet15(bn, layer1, layer2, layer3, layer4, layer5, layer6, layerf)
end

# Initialize parameters by delegating to each sublayer, in field order so the
# RNG stream matches the original sequential implementation.
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet15)
    return (
        bn     = Lux.initialparameters(rng, model.bn),
        layer1 = Lux.initialparameters(rng, model.layer1),
        layer2 = Lux.initialparameters(rng, model.layer2),
        layer3 = Lux.initialparameters(rng, model.layer3),
        layer4 = Lux.initialparameters(rng, model.layer4),
        layer5 = Lux.initialparameters(rng, model.layer5),
        layer6 = Lux.initialparameters(rng, model.layer6),
        layerf = Lux.initialparameters(rng, model.layerf),
    )
end

# Initialize states: one state NamedTuple per sublayer, keyed by field name,
# visited in field order (RNG stream preserved).
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet15)
    return (
        bn     = Lux.initialstates(rng, model.bn),
        layer1 = Lux.initialstates(rng, model.layer1),
        layer2 = Lux.initialstates(rng, model.layer2),
        layer3 = Lux.initialstates(rng, model.layer3),
        layer4 = Lux.initialstates(rng, model.layer4),
        layer5 = Lux.initialstates(rng, model.layer5),
        layer6 = Lux.initialstates(rng, model.layer6),
        layerf = Lux.initialstates(rng, model.layerf),
    )
end

# Forward pass: BatchNorm on the input, three pairs of parallel dense branches
# merged by elementwise addition, then a final linear projection.
function (model::PolicyNet15)(x::AbstractArray, ps, st)
    # BatchNorm expects (features, batch); run the input as a 1-sample batch
    # and flatten it back afterwards.
    normed, _ = model.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    h = reshape(normed, length(normed))
    # Pair 1
    a, s1 = model.layer1(h, ps.layer1, st.layer1)
    b, s2 = model.layer2(h, ps.layer2, st.layer2)
    h = a + b
    # Pair 2
    a, s3 = model.layer3(h, ps.layer3, st.layer3)
    b, s4 = model.layer4(h, ps.layer4, st.layer4)
    h = a + b
    # Pair 3
    a, s5 = model.layer5(h, ps.layer5, st.layer5)
    b, s6 = model.layer6(h, ps.layer6, st.layer6)
    h = a + b
    # Final projection
    out, sf = model.layerf(h, ps.layerf, st.layerf)
    # The updated BatchNorm state is deliberately discarded: the incoming
    # st.bn is returned unchanged (original note: "BN does not change state").
    return out, (bn=st.bn, layer1=s1, layer2=s2, layer3=s3, layer4=s4,
                 layer5=s5, layer6=s6, layerf=sf)
end
export PolicyNet15  # add PolicyNet15 to the module's public API
# PolicyNet16 definition
# Model structure: BatchNorm front-end, four sum-merged pairs of Dense
# branches (swish-activated + linear), then a final Dense read-out.
# Field order matters: the positional constructor is called in this order.
struct PolicyNet16 <: Lux.AbstractLuxLayer
    bn::BatchNorm   # input normalization (non-affine)
    layer1::Dense   # pair 1: swish branch
    layer2::Dense   # pair 1: linear branch
    layer3::Dense   # pair 2: swish branch
    layer4::Dense   # pair 2: linear branch
    layer5::Dense   # pair 3: swish branch
    layer6::Dense   # pair 3: linear branch
    layer7::Dense   # pair 4: swish branch (width-preserving)
    layer8::Dense   # pair 4: linear branch (width-preserving)
    layerf::Dense   # final linear projection
end

# Constructor
# Build a PolicyNet16: arithmetic-mean width on the way up, geometric-mean
# width on the way down, plus one width-preserving pair (layer7/layer8)
# before the final projection.
function PolicyNet16(input_size::Int, hidden_size::Int, output_size::Int)
    # Integer division / floor(Int, x) are the idiomatic, identical forms of
    # Int(floor(...)) used in the original.
    hidden_size_med = (hidden_size + input_size) ÷ 2
    hidden_size_med2 = floor(Int, sqrt(hidden_size * output_size))
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.swish)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med, hidden_size, NNlib.swish)
    layer4 = Dense(hidden_size_med, hidden_size)
    layer5 = Dense(hidden_size, hidden_size_med2, NNlib.swish)
    layer6 = Dense(hidden_size, hidden_size_med2)
    layer7 = Dense(hidden_size_med2, hidden_size_med2, NNlib.swish)
    layer8 = Dense(hidden_size_med2, hidden_size_med2)
    layerf = Dense(hidden_size_med2, output_size)
    return PolicyNet16(bn, layer1, layer2, layer3, layer4, layer5, layer6, layer7, layer8, layerf)
end

# Initialize parameters by delegating to each sublayer, in field order so the
# RNG stream matches the original sequential implementation.
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet16)
    return (
        bn     = Lux.initialparameters(rng, model.bn),
        layer1 = Lux.initialparameters(rng, model.layer1),
        layer2 = Lux.initialparameters(rng, model.layer2),
        layer3 = Lux.initialparameters(rng, model.layer3),
        layer4 = Lux.initialparameters(rng, model.layer4),
        layer5 = Lux.initialparameters(rng, model.layer5),
        layer6 = Lux.initialparameters(rng, model.layer6),
        layer7 = Lux.initialparameters(rng, model.layer7),
        layer8 = Lux.initialparameters(rng, model.layer8),
        layerf = Lux.initialparameters(rng, model.layerf),
    )
end

# Initialize states: one state NamedTuple per sublayer, keyed by field name,
# visited in field order (RNG stream preserved).
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet16)
    return (
        bn     = Lux.initialstates(rng, model.bn),
        layer1 = Lux.initialstates(rng, model.layer1),
        layer2 = Lux.initialstates(rng, model.layer2),
        layer3 = Lux.initialstates(rng, model.layer3),
        layer4 = Lux.initialstates(rng, model.layer4),
        layer5 = Lux.initialstates(rng, model.layer5),
        layer6 = Lux.initialstates(rng, model.layer6),
        layer7 = Lux.initialstates(rng, model.layer7),
        layer8 = Lux.initialstates(rng, model.layer8),
        layerf = Lux.initialstates(rng, model.layerf),
    )
end

# Forward pass: BatchNorm on the input, four sum-merged pairs of dense
# branches, then a final linear projection.
function (model::PolicyNet16)(x::AbstractArray, ps, st)
    # BatchNorm expects (features, batch); run the vector as a 1-sample batch
    # and flatten it back afterwards. The updated BN state is deliberately
    # discarded; the incoming st.bn is returned unchanged (original note:
    # "BN does not change state").
    x = reshape(x, length(x), 1)
    x, _ = model.bn(x, ps.bn, st.bn)
    x = reshape(x, length(x))
    st_bn = st.bn
    # Pair 1
    y1, st1 = model.layer1(x, ps.layer1, st.layer1)
    y2, st2 = model.layer2(x, ps.layer2, st.layer2)
    y = y1 + y2
    # Pair 2
    y1, st3 = model.layer3(y, ps.layer3, st.layer3)
    y2, st4 = model.layer4(y, ps.layer4, st.layer4)
    y = y1 + y2
    # Pair 3
    y1, st5 = model.layer5(y, ps.layer5, st.layer5)
    y2, st6 = model.layer6(y, ps.layer6, st.layer6)
    y = y1 + y2
    # Pair 4 -- BUGFIX: the original dispatched through model.layer5 and
    # model.layer6 here (copy-paste slip) while passing ps.layer7/ps.layer8.
    # It only produced correct results by accident: Lux.Dense takes its
    # weights from ps, and the activations of layer5/layer7 (swish) and
    # layer6/layer8 (identity) coincide. Use the intended sublayers.
    y1, st7 = model.layer7(y, ps.layer7, st.layer7)
    y2, st8 = model.layer8(y, ps.layer8, st.layer8)
    y = y1 + y2
    # Final projection
    y, stf = model.layerf(y, ps.layerf, st.layerf)
    return y, (bn=st_bn, layer1=st1, layer2=st2, layer3=st3, layer4=st4,
               layer5=st5, layer6=st6, layer7=st7, layer8=st8, layerf=stf)
end
export PolicyNet16  # add PolicyNet16 to the module's public API
# PolicyNet17 definition
# Model structure: same topology as PolicyNet15 (BatchNorm, three sum-merged
# Dense pairs, final Dense read-out); it differs only in how the intermediate
# widths are computed (see the constructor).
struct PolicyNet17 <: Lux.AbstractLuxLayer
    bn::BatchNorm   # input normalization (non-affine)
    layer1::Dense   # pair 1: swish branch
    layer2::Dense   # pair 1: linear branch
    layer3::Dense   # pair 2: swish branch
    layer4::Dense   # pair 2: linear branch
    layer5::Dense   # pair 3: swish branch
    layer6::Dense   # pair 3: linear branch
    layerf::Dense   # final linear projection
end

# Constructor
# Build a PolicyNet17: arithmetic-mean width on the way up, geometric-mean
# width on the way down.
function PolicyNet17(input_size::Int, hidden_size::Int, output_size::Int)
    # Integer division / floor(Int, x) are the idiomatic, identical forms of
    # Int(floor(...)) used in the original.
    hidden_size_med = (hidden_size + input_size) ÷ 2
    hidden_size_med2 = floor(Int, sqrt(hidden_size * output_size))
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.swish)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med, hidden_size, NNlib.swish)
    layer4 = Dense(hidden_size_med, hidden_size)
    layer5 = Dense(hidden_size, hidden_size_med2, NNlib.swish)
    layer6 = Dense(hidden_size, hidden_size_med2)
    layerf = Dense(hidden_size_med2, output_size)
    return PolicyNet17(bn, layer1, layer2, layer3, layer4, layer5, layer6, layerf)
end

# Initialize parameters by delegating to each sublayer, in field order so the
# RNG stream matches the original sequential implementation.
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet17)
    return (
        bn     = Lux.initialparameters(rng, model.bn),
        layer1 = Lux.initialparameters(rng, model.layer1),
        layer2 = Lux.initialparameters(rng, model.layer2),
        layer3 = Lux.initialparameters(rng, model.layer3),
        layer4 = Lux.initialparameters(rng, model.layer4),
        layer5 = Lux.initialparameters(rng, model.layer5),
        layer6 = Lux.initialparameters(rng, model.layer6),
        layerf = Lux.initialparameters(rng, model.layerf),
    )
end

# Initialize states: one state NamedTuple per sublayer, keyed by field name,
# visited in field order (RNG stream preserved).
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet17)
    return (
        bn     = Lux.initialstates(rng, model.bn),
        layer1 = Lux.initialstates(rng, model.layer1),
        layer2 = Lux.initialstates(rng, model.layer2),
        layer3 = Lux.initialstates(rng, model.layer3),
        layer4 = Lux.initialstates(rng, model.layer4),
        layer5 = Lux.initialstates(rng, model.layer5),
        layer6 = Lux.initialstates(rng, model.layer6),
        layerf = Lux.initialstates(rng, model.layerf),
    )
end

# Forward pass: BatchNorm on the input, three pairs of parallel dense branches
# merged by elementwise addition, then a final linear projection.
function (model::PolicyNet17)(x::AbstractArray, ps, st)
    # BatchNorm expects (features, batch); run the input as a 1-sample batch
    # and flatten it back afterwards.
    normed, _ = model.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    h = reshape(normed, length(normed))
    # Pair 1
    a, s1 = model.layer1(h, ps.layer1, st.layer1)
    b, s2 = model.layer2(h, ps.layer2, st.layer2)
    h = a + b
    # Pair 2
    a, s3 = model.layer3(h, ps.layer3, st.layer3)
    b, s4 = model.layer4(h, ps.layer4, st.layer4)
    h = a + b
    # Pair 3
    a, s5 = model.layer5(h, ps.layer5, st.layer5)
    b, s6 = model.layer6(h, ps.layer6, st.layer6)
    h = a + b
    # Final projection
    out, sf = model.layerf(h, ps.layerf, st.layerf)
    # The updated BatchNorm state is deliberately discarded: the incoming
    # st.bn is returned unchanged (original note: "BN does not change state").
    return out, (bn=st.bn, layer1=s1, layer2=s2, layer3=s3, layer4=s4,
                 layer5=s5, layer6=s6, layerf=sf)
end
export PolicyNet17  # add PolicyNet17 to the module's public API
# PolicyNet18 definition
# Model structure: BatchNorm front-end and three Dense branch pairs like the
# sibling networks, but the pairs are merged by CONCATENATION in the forward
# pass, so each downstream layer consumes a doubled width (see constructor).
struct PolicyNet18 <: Lux.AbstractLuxLayer
    bn::BatchNorm   # input normalization (non-affine)
    layer1::Dense   # pair 1: swish branch
    layer2::Dense   # pair 1: linear branch
    layer3::Dense   # pair 2: swish branch (input width = 2 * pair-1 output)
    layer4::Dense   # pair 2: linear branch
    layer5::Dense   # pair 3: swish branch (input width = 2 * hidden_size)
    layer6::Dense   # pair 3: linear branch
    layerf::Dense   # final linear projection (input width = 2 * pair-3 output)
end

# Constructor
# Build a PolicyNet18. Because the forward pass concatenates (rather than
# adds) each branch pair, every downstream layer's input width is doubled.
function PolicyNet18(input_size::Int, hidden_size::Int, output_size::Int)
    # Integer division / floor(Int, x) are the idiomatic, identical forms of
    # Int(floor(...)) used in the original.
    hidden_size_med = (hidden_size + input_size) ÷ 2
    hidden_size_med2 = floor(Int, sqrt(hidden_size * output_size))
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.swish)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med*2, hidden_size, NNlib.swish)
    layer4 = Dense(hidden_size_med*2, hidden_size)
    layer5 = Dense(hidden_size*2, hidden_size_med2, NNlib.swish)
    layer6 = Dense(hidden_size*2, hidden_size_med2)
    layerf = Dense(hidden_size_med2*2, output_size)
    return PolicyNet18(bn, layer1, layer2, layer3, layer4, layer5, layer6, layerf)
end

# Initialize parameters by delegating to each sublayer, in field order so the
# RNG stream matches the original sequential implementation.
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet18)
    return (
        bn     = Lux.initialparameters(rng, model.bn),
        layer1 = Lux.initialparameters(rng, model.layer1),
        layer2 = Lux.initialparameters(rng, model.layer2),
        layer3 = Lux.initialparameters(rng, model.layer3),
        layer4 = Lux.initialparameters(rng, model.layer4),
        layer5 = Lux.initialparameters(rng, model.layer5),
        layer6 = Lux.initialparameters(rng, model.layer6),
        layerf = Lux.initialparameters(rng, model.layerf),
    )
end

# Initialize states: one state NamedTuple per sublayer, keyed by field name,
# visited in field order (RNG stream preserved).
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet18)
    return (
        bn     = Lux.initialstates(rng, model.bn),
        layer1 = Lux.initialstates(rng, model.layer1),
        layer2 = Lux.initialstates(rng, model.layer2),
        layer3 = Lux.initialstates(rng, model.layer3),
        layer4 = Lux.initialstates(rng, model.layer4),
        layer5 = Lux.initialstates(rng, model.layer5),
        layer6 = Lux.initialstates(rng, model.layer6),
        layerf = Lux.initialstates(rng, model.layerf),
    )
end

# Forward pass: like the summed variants, but each branch pair is merged by
# concatenation along the last dimension of the branch output, so downstream
# layers consume doubled widths.
function (model::PolicyNet18)(x::AbstractArray, ps, st)
    # BatchNorm expects (features, batch); run the input as a 1-sample batch
    # and flatten it back afterwards.
    normed, _ = model.bn(reshape(x, length(x), 1), ps.bn, st.bn)
    h = reshape(normed, length(normed))
    # Pair 1
    a, s1 = model.layer1(h, ps.layer1, st.layer1)
    b, s2 = model.layer2(h, ps.layer2, st.layer2)
    h = cat(a, b; dims=ndims(a))
    # Pair 2
    a, s3 = model.layer3(h, ps.layer3, st.layer3)
    b, s4 = model.layer4(h, ps.layer4, st.layer4)
    h = cat(a, b; dims=ndims(a))
    # Pair 3
    a, s5 = model.layer5(h, ps.layer5, st.layer5)
    b, s6 = model.layer6(h, ps.layer6, st.layer6)
    h = cat(a, b; dims=ndims(a))
    # Final projection
    out, sf = model.layerf(h, ps.layerf, st.layerf)
    # The updated BatchNorm state is deliberately discarded: the incoming
    # st.bn is returned unchanged (original note: "BN does not change state").
    return out, (bn=st.bn, layer1=s1, layer2=s2, layer3=s3, layer4=s4,
                 layer5=s5, layer6=s6, layerf=sf)
end
export PolicyNet18  # add PolicyNet18 to the module's public API
# PolicyNet19 definition
# Model structure: two sum-merged pairs of gelu-activated/linear Dense
# branches plus a final Dense read-out. The BatchNorm field only supplies
# externally-maintained running statistics; the forward pass normalizes
# manually from st.bn instead of calling the layer (see forward).
struct PolicyNet19 <: Lux.AbstractLuxLayer
    bn::BatchNorm   # holds running_mean / running_var used by the forward pass
    layer1::Dense   # pair 1: gelu branch
    layer2::Dense   # pair 1: linear branch
    layer3::Dense   # pair 2: gelu branch
    layer4::Dense   # pair 2: linear branch
    layerf::Dense   # final linear projection
end

# Constructor
# Build a PolicyNet19: a narrow bottleneck (hidden/4) that widens to
# hidden_size, using gelu-activated branches.
function PolicyNet19(input_size::Int, hidden_size::Int, output_size::Int)
    # Integer division is the idiomatic (and identical) form of
    # Int(floor(hidden_size / 4)) for non-negative Ints.
    hidden_size_med = hidden_size ÷ 4
    bn = BatchNorm(input_size, affine=false)
    layer1 = Dense(input_size, hidden_size_med, NNlib.gelu)
    layer2 = Dense(input_size, hidden_size_med)
    layer3 = Dense(hidden_size_med, hidden_size, NNlib.gelu)
    layer4 = Dense(hidden_size_med, hidden_size)
    layerf = Dense(hidden_size, output_size)
    return PolicyNet19(bn, layer1, layer2, layer3, layer4, layerf)
end

# Initialize parameters by delegating to each sublayer, in field order so the
# RNG stream matches the original sequential implementation.
function Lux.initialparameters(rng::AbstractRNG, model::PolicyNet19)
    return (
        bn     = Lux.initialparameters(rng, model.bn),
        layer1 = Lux.initialparameters(rng, model.layer1),
        layer2 = Lux.initialparameters(rng, model.layer2),
        layer3 = Lux.initialparameters(rng, model.layer3),
        layer4 = Lux.initialparameters(rng, model.layer4),
        layerf = Lux.initialparameters(rng, model.layerf),
    )
end

# Initialize states: one state NamedTuple per sublayer, keyed by field name,
# visited in field order (RNG stream preserved).
function Lux.initialstates(rng::AbstractRNG, model::PolicyNet19)
    return (
        bn     = Lux.initialstates(rng, model.bn),
        layer1 = Lux.initialstates(rng, model.layer1),
        layer2 = Lux.initialstates(rng, model.layer2),
        layer3 = Lux.initialstates(rng, model.layer3),
        layer4 = Lux.initialstates(rng, model.layer4),
        layerf = Lux.initialstates(rng, model.layerf),
    )
end

# Forward pass
# Manual batch normalization from pre-accumulated running statistics: per the
# original comment, a single sample cannot estimate batch statistics, so the
# running mean/var stored in st.bn are assigned externally and applied
# directly here instead of calling the BatchNorm layer.
function (model::PolicyNet19)(x::AbstractArray, ps, st)
    # NOTE: the 1e-5 stabilizer is added AFTER the square root (outside it),
    # exactly as in the original formulation -- do not "fix" to sqrt(var+eps).
    xn = @. (x - st.bn.running_mean) / (1e-5 + sqrt(st.bn.running_var))
    # Pair 1: gelu-activated + linear branch, merged by addition.
    a, s1 = model.layer1(xn, ps.layer1, st.layer1)
    b, s2 = model.layer2(xn, ps.layer2, st.layer2)
    h = a + b
    # Pair 2
    a, s3 = model.layer3(h, ps.layer3, st.layer3)
    b, s4 = model.layer4(h, ps.layer4, st.layer4)
    h = a + b
    # Final projection
    out, sf = model.layerf(h, ps.layerf, st.layerf)
    # BN state is passed through unchanged.
    return out, (bn=st.bn, layer1=s1, layer2=s2, layer3=s3, layer4=s4, layerf=sf)
end
export PolicyNet19  # add PolicyNet19 to the module's public API
end