﻿using TorchSharp;
using static TorchSharp.torch;

namespace Qwen3.Module;

/// <summary>
/// A single dense (non-MoE) Qwen3 decoder layer: pre-norm self-attention
/// followed by a pre-norm feed-forward MLP, each wrapped in a residual add.
/// </summary>
public class Qwen3DenseBlock : torch.nn.Module<Tensor, Tensor, Tensor, Tensor>
{
    private readonly Qwen3RMSNorm input_layernorm;
    private readonly Qwen3DenseAttention self_attn;
    private readonly Qwen3RMSNorm post_attention_layernorm;
    private readonly Qwen3DenseMLP mlp;

    /// <summary>
    /// Builds the layer's sub-modules from <paramref name="config"/> and
    /// registers them with TorchSharp so parameters are tracked.
    /// </summary>
    /// <param name="config">Model hyper-parameters (hidden size, RMSNorm epsilon, …).</param>
    public Qwen3DenseBlock(
        Qwen3Config config)
        : base(nameof(Qwen3DenseBlock))
    {
        var hiddenSize = config.HiddenSize;
        var rmsEps = config.RmsNormEps;

        this.self_attn = new Qwen3DenseAttention(config);
        this.mlp = new Qwen3DenseMLP(config);
        this.input_layernorm = new Qwen3RMSNorm(hiddenSize, rmsEps);
        this.post_attention_layernorm = new Qwen3RMSNorm(hiddenSize, rmsEps);

        // Must run after all sub-modules are assigned so they get registered.
        this.RegisterComponents();
    }

    /// <summary>
    /// Runs one decoder step: x → x + Attn(Norm(x)), then h → h + MLP(Norm(h)).
    /// </summary>
    /// <param name="x">Hidden states entering the layer.</param>
    /// <param name="cos">Rotary-embedding cosine table passed through to attention.</param>
    /// <param name="sin">Rotary-embedding sine table passed through to attention.</param>
    /// <returns>The transformed hidden states, moved out of the local dispose scope.</returns>
    public override Tensor forward(Tensor x, Tensor cos, Tensor sin)
    {
        // Intermediate tensors are freed when this scope ends.
        using var scope = NewDisposeScope();

        // Pre-norm residual: attention sub-layer.
        var attnOut = this.self_attn.forward(this.input_layernorm.forward(x), cos, sin);
        var hidden = x + attnOut;

        // Pre-norm residual: feed-forward sub-layer.
        var mlpOut = this.mlp.forward(this.post_attention_layernorm.forward(hidden));
        var result = hidden + mlpOut;

        // Keep only the final result alive beyond the scope.
        return result.MoveToOuterDisposeScope();
    }
}

/// <summary>
/// A Qwen3 MoE-variant decoder layer: pre-norm self-attention (MoE attention
/// flavor) followed by a pre-norm feed-forward stage, each with a residual add.
/// </summary>
public class Qwen3MoEBlock : torch.nn.Module<Tensor, Tensor, Tensor, Tensor>
{
    private readonly Qwen3RMSNorm input_layernorm;
    private readonly Qwen3MoeAttention self_attn;
    private readonly Qwen3RMSNorm post_attention_layernorm;
    // NOTE(review): the feed-forward here is the dense Qwen3DenseMLP, not a
    // sparse expert/router module — presumably intentional (e.g. shared-expert
    // or dense-FFN layers in an MoE model), but worth confirming against the
    // upstream Qwen3-MoE architecture.
    private readonly Qwen3DenseMLP mlp;

    /// <summary>
    /// Builds the layer's sub-modules from <paramref name="config"/> and
    /// registers them with TorchSharp so parameters are tracked.
    /// </summary>
    /// <param name="config">Model hyper-parameters (hidden size, RMSNorm epsilon, …).</param>
    public Qwen3MoEBlock(
        Qwen3Config config)
        : base(nameof(Qwen3MoEBlock))
    {
        var hiddenSize = config.HiddenSize;
        var rmsEps = config.RmsNormEps;

        this.self_attn = new Qwen3MoeAttention(config);
        this.mlp = new Qwen3DenseMLP(config);
        this.input_layernorm = new Qwen3RMSNorm(hiddenSize, rmsEps);
        this.post_attention_layernorm = new Qwen3RMSNorm(hiddenSize, rmsEps);

        // Must run after all sub-modules are assigned so they get registered.
        this.RegisterComponents();
    }

    /// <summary>
    /// Runs one decoder step: x → x + Attn(Norm(x)), then h → h + MLP(Norm(h)).
    /// </summary>
    /// <param name="x">Hidden states entering the layer.</param>
    /// <param name="cos">Rotary-embedding cosine table passed through to attention.</param>
    /// <param name="sin">Rotary-embedding sine table passed through to attention.</param>
    /// <returns>The transformed hidden states, moved out of the local dispose scope.</returns>
    public override Tensor forward(Tensor x, Tensor cos, Tensor sin)
    {
        // Intermediate tensors are freed when this scope ends.
        using var scope = NewDisposeScope();

        // Pre-norm residual: attention sub-layer.
        var attnOut = this.self_attn.forward(this.input_layernorm.forward(x), cos, sin);
        var hidden = x + attnOut;

        // Pre-norm residual: feed-forward sub-layer.
        var mlpOut = this.mlp.forward(this.post_attention_layernorm.forward(hidden));
        var result = hidden + mlpOut;

        // Keep only the final result alive beyond the scope.
        return result.MoveToOuterDisposeScope();
    }
}