def calculate_transformer_params(d_model, n_heads, n_layers, mlp_ratio):
    """Estimate the parameter count of a Transformer encoder stack.

    The estimate covers the multi-head self-attention projection weights
    (Q, K, V and output), the two feed-forward projection weights, and a
    flat ``3 * d_model`` allowance per layer for LayerNorm parameters.
    Bias terms of the linear layers are not counted.

    Args:
        d_model: Hidden size of the model.
        n_heads: Number of attention heads (assumed to divide d_model).
        n_layers: Number of encoder layers.
        mlp_ratio: Feed-forward hidden size as a multiple of d_model.

    Returns:
        Total parameter count summed over all encoder layers.
    """
    # Q, K, V and output projections: 4 matrices, each d_model x d_model
    # in aggregate across the heads.
    attn_params = 4 * d_model * (d_model // n_heads) * n_heads

    # Two feed-forward projections: d_model -> hidden and hidden -> d_model.
    ffn_hidden = int(mlp_ratio * d_model)
    ffn_params = 2 * d_model * ffn_hidden

    # NOTE(review): 3 * d_model for LayerNorm looks low — two LayerNorms
    # with weight + bias would be 4 * d_model per layer; confirm intent.
    per_layer = attn_params + ffn_params + 3 * d_model

    return per_layer * n_layers

# Example usage: estimate the size of a large encoder configuration.
if __name__ == "__main__":
    config = {
        "d_model": 4096,
        "n_heads": 64,
        "n_layers": 32,
        "mlp_ratio": 8,
    }
    total_params = calculate_transformer_params(**config)
    print(f"Total number of parameters in the Transformer model: {total_params}")
