﻿using Qwen3.Module;
using System.Text;
using Tokenizers.DotNet;
using TorchSharp;
using static TorchSharp.torch;

namespace Qwen3;

/// <summary>
/// Text-generation pipeline for the dense Qwen3 model: encodes a prompt with the
/// tokenizer, runs generation on the model, and decodes generated token ids back to text.
/// </summary>
public class Qwen3DensePipeline
{
    private readonly Qwen3Tokenizer _tokenizer;
    private readonly string _device;
    private readonly Qwen3Dense _model;

    /// <summary>Creates the pipeline and switches the model into inference mode.</summary>
    /// <param name="model">Dense Qwen3 model used for generation.</param>
    /// <param name="tokenizer">Tokenizer used to encode prompts and decode outputs.</param>
    /// <param name="device">TorchSharp device string (e.g. "cpu" or "cuda") the input tensor is placed on.</param>
    public Qwen3DensePipeline(Qwen3Dense model, Qwen3Tokenizer tokenizer, string device)
    {
        // Put the model into eval mode so inference behaves deterministically
        // (dropout disabled). This matches Qwen3MoEPipeline's constructor, which
        // already calls eval(); omitting it here was an inconsistency.
        model.eval();
        _tokenizer = tokenizer;
        _device = device;
        _model = model;
    }

    /// <summary>
    /// Generates text incrementally, yielding decoded fragments as soon as the
    /// streamer can emit a printable span.
    /// </summary>
    /// <param name="prompt">Prompt text to condition generation on.</param>
    /// <param name="stopTokens">Token ids that terminate generation.</param>
    /// <param name="maxLen">Maximum number of tokens to generate.</param>
    /// <param name="temperature">Sampling temperature. NOTE(review): not currently forwarded to the model — confirm GenerateStream supports it.</param>
    /// <param name="topP">Nucleus-sampling threshold. NOTE(review): not currently forwarded to the model.</param>
    /// <returns>A lazy sequence of decoded text fragments.</returns>
    public IEnumerable<string> GenerateStreaming(
        string prompt,
        List<int> stopTokens,
        int maxLen = 1024,
        float temperature = 0.7f,
        float topP = 0.9f)
    {
        // NOTE(review): this is an iterator, so the no_grad scope stays active
        // between yields until the enumerator is disposed — confirm callers do
        // not run gradient-dependent code while enumerating.
        using (torch.no_grad())
        {
            var inputIds = _tokenizer.Encode(prompt);
            var inputTensor = torch.tensor(inputIds.ToArray(), dtype: ScalarType.Int64, device: _device).unsqueeze(0);
            var streamer = new TextStreamer(_tokenizer);
            var generation = _model.GenerateStream(inputTensor, maxLen, stopTokens, streamer);
            foreach (var tokenId in generation)
            {
                // Pull whatever printable text the streamer has accumulated so far.
                var textFragment = streamer.GetLatestText();
                if (!string.IsNullOrEmpty(textFragment))
                {
                    yield return textFragment;
                }
            }

            // Emit any text still buffered after the last token.
            var finalText = streamer.Flush();
            if (!string.IsNullOrEmpty(finalText))
            {
                yield return finalText;
            }
        }
    }

    /// <summary>
    /// Generates a complete response for <paramref name="prompt"/> and returns it
    /// as a single decoded string (prompt tokens excluded).
    /// </summary>
    /// <param name="prompt">Prompt text to condition generation on.</param>
    /// <param name="stopTokens">Token ids that terminate generation.</param>
    /// <param name="maxLen">Maximum number of tokens to generate.</param>
    /// <param name="temperature">Sampling temperature. NOTE(review): not currently forwarded to the model.</param>
    /// <param name="topP">Nucleus-sampling threshold. NOTE(review): not currently forwarded to the model.</param>
    /// <returns>The decoded continuation text.</returns>
    public string Generate(
        string prompt,
        List<int> stopTokens,
        int maxLen = 1024,
        float temperature = 0.7f,
        float topP = 0.9f)
    {
        using (torch.no_grad())
        {
            var inputIds = _tokenizer.Encode(prompt);
            var inputTensor = torch.tensor(inputIds.ToArray(), dtype: ScalarType.Int64, device: _device).unsqueeze(0);
            var generation = _model.Generate(inputTensor, maxLen, stopTokens);
            // Slice off the prompt so only newly generated tokens are decoded.
            var inputLength = inputIds.Count;
            var responseIds = generation[torch.TensorIndex.Colon, torch.TensorIndex.Slice(inputLength, null)];
            var responseIdList = responseIds[0].to(torch.int32).data<int>().ToList();
            return _tokenizer.Decode(responseIdList);
        }
    }
}


/// <summary>
/// Text-generation pipeline for the mixture-of-experts Qwen3 model: encodes a
/// prompt, runs generation, and decodes the newly generated token ids to text.
/// </summary>
public class Qwen3MoEPipeline
{
    private readonly Qwen3Tokenizer _tokenizer;
    private readonly string _device;
    private readonly Qwen3MoE _model;

    /// <summary>Creates the pipeline and switches the model into inference (eval) mode.</summary>
    /// <param name="model">MoE Qwen3 model used for generation.</param>
    /// <param name="tokenizer">Tokenizer used to encode prompts and decode outputs.</param>
    /// <param name="device">TorchSharp device string (e.g. "cpu" or "cuda") for the input tensor.</param>
    public Qwen3MoEPipeline(Qwen3MoE model, Qwen3Tokenizer tokenizer, string device)
    {
        model.eval();
        _tokenizer = tokenizer;
        _device = device;
        _model = model;
    }

    /// <summary>
    /// Generates text incrementally, yielding decoded fragments while tokens stream in.
    /// </summary>
    /// <param name="prompt">Prompt text to condition generation on.</param>
    /// <param name="stopTokens">Token ids that terminate generation.</param>
    /// <param name="maxLen">Maximum number of tokens to generate.</param>
    /// <param name="temperature">Sampling temperature (accepted for API symmetry).</param>
    /// <param name="topP">Nucleus-sampling threshold (accepted for API symmetry).</param>
    /// <returns>A lazy sequence of decoded text fragments.</returns>
    public IEnumerable<string> GenerateStreaming(
        string prompt,
        List<int> stopTokens,
        int maxLen = 1024,
        float temperature = 0.7f,
        float topP = 0.9f)
    {
        using (torch.no_grad())
        {
            var promptIds = _tokenizer.Encode(prompt);
            var promptTensor = torch
                .tensor(promptIds.ToArray(), dtype: ScalarType.Int64, device: _device)
                .unsqueeze(0);
            var streamer = new TextStreamer(_tokenizer);

            // Each step of the enumeration advances the model by one token; the
            // streamer converts buffered tokens into printable text fragments.
            foreach (var _ in _model.GenerateStream(promptTensor, maxLen, stopTokens, streamer))
            {
                var fragment = streamer.GetLatestText();
                if (string.IsNullOrEmpty(fragment))
                {
                    continue;
                }
                yield return fragment;
            }

            // Drain whatever text is still buffered after generation ends.
            var remainder = streamer.Flush();
            if (!string.IsNullOrEmpty(remainder))
            {
                yield return remainder;
            }
        }
    }

    /// <summary>
    /// Generates a full response for <paramref name="prompt"/> and returns only the
    /// continuation (prompt tokens are sliced off before decoding).
    /// </summary>
    /// <param name="prompt">Prompt text to condition generation on.</param>
    /// <param name="stopTokens">Token ids that terminate generation.</param>
    /// <param name="maxLen">Maximum number of tokens to generate.</param>
    /// <param name="temperature">Sampling temperature (accepted for API symmetry).</param>
    /// <param name="topP">Nucleus-sampling threshold (accepted for API symmetry).</param>
    /// <returns>The decoded continuation text.</returns>
    public string Generate(
        string prompt,
        List<int> stopTokens,
        int maxLen = 1024,
        float temperature = 0.7f,
        float topP = 0.9f)
    {
        using (torch.no_grad())
        {
            var promptIds = _tokenizer.Encode(prompt);
            var promptTensor = torch
                .tensor(promptIds.ToArray(), dtype: ScalarType.Int64, device: _device)
                .unsqueeze(0);
            var output = _model.Generate(promptTensor, maxLen, stopTokens);

            // Keep only the tokens that come after the prompt in batch row 0.
            var continuation = output[torch.TensorIndex.Colon, torch.TensorIndex.Slice(promptIds.Count, null)];
            var continuationIds = continuation[0].to(torch.int32).data<int>().ToList();
            return _tokenizer.Decode(continuationIds);
        }
    }
}

/// <summary>
/// Incrementally converts generated token ids into printable text. Tokens are
/// buffered and re-decoded as a group so multi-token characters decode correctly;
/// text is released at safe boundaries (newline, CJK character, or word break).
/// </summary>
public class TextStreamer
{
    private readonly Qwen3Tokenizer _tokenizer;
    private readonly bool _skipPrompt;
    // Tokens decoded together on every Put so partial UTF-8 sequences resolve.
    private readonly List<int> _tokenCache = new List<int>();
    // Number of chars of the decoded cache text already emitted.
    private int _printLen;
    private bool _nextTokensArePrompt = true;
    // Printable text accumulated since the last GetLatestText call.
    private readonly StringBuilder _textBuffer = new StringBuilder();

    /// <summary>Creates a streamer over the given tokenizer.</summary>
    /// <param name="tokenizer">Tokenizer used to decode buffered token ids.</param>
    /// <param name="skipPrompt">When true, the first token pushed after construction/flush is dropped (treated as prompt).</param>
    public TextStreamer(Qwen3Tokenizer tokenizer, bool skipPrompt = false)
    {
        _tokenizer = tokenizer;
        _skipPrompt = skipPrompt;
    }

    /// <summary>
    /// Buffers one generated token and moves any newly printable text into the
    /// internal buffer (retrieved via <see cref="GetLatestText"/>).
    /// </summary>
    /// <param name="token">The next generated token id.</param>
    public void Put(int token)
    {
        if (_skipPrompt && _nextTokensArePrompt)
        {
            _nextTokensArePrompt = false;
            return;
        }

        _tokenCache.Add(token);
        string text = _tokenizer.Decode(_tokenCache);

        string printableText = null;

        if (text.EndsWith("\n"))
        {
            // A newline is always a safe boundary: emit everything and reset.
            printableText = text[_printLen..];
            _tokenCache.Clear();
            _printLen = 0;
        }
        else if (text.Length > 0 && EndsWithChineseChar(text))
        {
            // CJK text has no spaces; each complete character is a safe boundary.
            printableText = text[_printLen..];
            _printLen = text.Length;
        }
        else
        {
            // For spaced scripts, only emit up to the last completed word so we
            // never split a word (or a partially decoded token) mid-way.
            int lastSpaceIndex = text.LastIndexOf(' ');
            if (lastSpaceIndex >= _printLen)
            {
                printableText = text.Substring(_printLen, lastSpaceIndex - _printLen + 1);
                _printLen = lastSpaceIndex + 1;
            }
        }

        if (printableText != null)
        {
            _textBuffer.Append(printableText);
        }
    }

    /// <summary>
    /// Returns and clears the text accumulated since the previous call, or null
    /// when nothing new is available.
    /// </summary>
    public string GetLatestText()
    {
        if (_textBuffer.Length == 0)
            return null;

        string text = _textBuffer.ToString();
        _textBuffer.Clear();
        return text;
    }

    /// <summary>
    /// Emits any remaining un-printed text and resets the streamer for the next
    /// generation round. Returns null when nothing was pending.
    /// </summary>
    public string Flush()
    {
        // BUG FIX: the prompt flag must be re-armed on every flush. Previously it
        // was only reset when the token cache was empty, because the early return
        // below skipped it.
        _nextTokensArePrompt = true;

        if (_tokenCache.Count > 0)
        {
            string text = _tokenizer.Decode(_tokenCache);
            string printableText = text[_printLen..];
            _tokenCache.Clear();
            _printLen = 0;
            return printableText;
        }

        return null;
    }

    /// <summary>
    /// True when <paramref name="text"/> ends with a CJK character, including
    /// supplementary-plane characters encoded as a UTF-16 surrogate pair.
    /// </summary>
    private static bool EndsWithChineseChar(string text)
    {
        int cp = text[^1];
        // BUG FIX: supplementary CJK (U+20000..U+2FA1F) arrives as a surrogate
        // pair; a single char can never exceed 0xFFFF, so those ranges were
        // previously unreachable. Combine the pair into a full code point.
        if (char.IsLowSurrogate(text[^1]) && text.Length >= 2 && char.IsHighSurrogate(text[^2]))
        {
            cp = char.ConvertToUtf32(text[^2], text[^1]);
        }
        return IsChineseCodePoint(cp);
    }

    /// <summary>True when the Unicode code point falls in a CJK ideograph block.</summary>
    private static bool IsChineseCodePoint(int cp)
    {
        return (cp >= 0x4E00 && cp <= 0x9FFF) ||      // CJK Unified Ideographs
               (cp >= 0x3400 && cp <= 0x4DBF) ||      // Extension A
               (cp >= 0x20000 && cp <= 0x2A6DF) ||    // Extension B
               (cp >= 0x2A700 && cp <= 0x2B73F) ||    // Extension C
               (cp >= 0x2B740 && cp <= 0x2B81F) ||    // Extension D
               (cp >= 0x2B820 && cp <= 0x2CEAF) ||    // Extension E
               (cp >= 0xF900 && cp <= 0xFAFF) ||      // Compatibility Ideographs
               (cp >= 0x2F800 && cp <= 0x2FA1F);      // Compatibility Supplement
    }
}
