package model

import (
	"context"
	"errors"
	"fmt"
	"io"
	"log"

	"agent.com/iagentgo/facade"
	"agent.com/iagentgo/infra"
	"github.com/cloudwego/eino-ext/components/model/ark"
	"github.com/cloudwego/eino-ext/components/model/deepseek"
	"github.com/cloudwego/eino-ext/components/model/openai"
	"github.com/cloudwego/eino-ext/components/model/qwen"
	eModel "github.com/cloudwego/eino/components/model"
	"github.com/cloudwego/eino/schema"
)

// ModelBridge routes chat requests to the configured chat-model backends.
type ModelBridge struct {
	mds      *ModelStyle // builds the message list for a given role/style/request
	bigModel map[string]eModel.ToolCallingChatModel // enabled backends, keyed by model id (facade.OPENAI, ...)
}

// ModelStreamOut adapts an eino message stream to the facade.ChatStreamOut
// interface (embedded for interface satisfaction).
type ModelStreamOut struct {
	facade.ChatStreamOut
	out *schema.StreamReader[*schema.Message] // underlying chunk stream from the model
}

var modelBridge *ModelBridge

// NewModelBridge constructs and initializes a ModelBridge, records it as the
// package singleton (see GetModelBridge), and returns it.
func NewModelBridge() *ModelBridge {
	bridge := &ModelBridge{}
	bridge.init()
	modelBridge = bridge
	return bridge
}

// GetModelBridge returns the singleton created by NewModelBridge, or nil if
// NewModelBridge has not been called yet.
func GetModelBridge() *ModelBridge {
	return modelBridge
}

// init wires up the style/message helper and constructs every backend model
// that is enabled in configuration.
func (b *ModelBridge) init() {
	b.mds = newModelStyle()
	b.bigModel = map[string]eModel.ToolCallingChatModel{}
	b.initModel()
}

// Chat runs a blocking, single-shot completion against the backend selected
// by request.ModelId and returns the full answer text. It returns an error
// when the model id is unknown or generation fails.
func (b *ModelBridge) Chat(style string, request *facade.ChatRequest) (*facade.ChatResponse, error) {
	model, found := b.bigModel[request.ModelId]
	if !found {
		return nil, errors.New("model not found")
	}

	messages := b.mds.getMessage(request.Role, style, request)
	answer, err := model.Generate(context.Background(), messages)
	if err != nil {
		return nil, err
	}
	return &facade.ChatResponse{Answer: answer.Content}, nil
}

// ChatStream starts a streaming completion against the backend selected by
// request.ModelId and wraps the resulting chunk stream in a ModelStreamOut.
// It returns an error when the model id is unknown or the stream cannot start.
func (b *ModelBridge) ChatStream(style string, request *facade.ChatRequest) (facade.ChatStreamOut, error) {
	model, found := b.bigModel[request.ModelId]
	if !found {
		return nil, errors.New("model not found")
	}

	messages := b.mds.getMessage(request.Role, style, request)
	stream, err := model.Stream(context.Background(), messages)
	if err != nil {
		return nil, err
	}
	return &ModelStreamOut{out: stream}, nil
}

// initModel registers every backend whose "<name>.enable" config flag is set.
// Constructors are only invoked for enabled backends.
func (b *ModelBridge) initModel() {
	backends := []struct {
		flag  string
		id    string
		build func() eModel.ToolCallingChatModel
	}{
		{"openai.enable", facade.OPENAI, newOpenAiModle},
		{"deepseek.enable", facade.DEEPSEEK, newDeepSeekModle},
		{"qwen.enable", facade.QWEN, newQwenModle},
		{"ark.enable", facade.ARK, newArkModle},
	}
	for _, be := range backends {
		if infra.GetB(be.flag) {
			b.bigModel[be.id] = be.build()
		}
	}
}

// newOpenAiModle builds an OpenAI-backed chat model from the "openai.*"
// configuration keys; construction failure is fatal at startup.
func newOpenAiModle() eModel.ToolCallingChatModel {
	log.Println("init openai model")
	cfg := &openai.ChatModelConfig{
		APIKey:      infra.GetS("openai.api_key", ""),
		Model:       infra.GetS("openai.model", ""),
		BaseURL:     infra.GetS("openai.base_url", ""),
		MaxTokens:   of(infra.GetI("openai.max_tokens", 0)),
		Temperature: of(infra.GetF("openai.temperature", 0)),
		TopP:        of(infra.GetF("openai.top_p", 0)),
	}
	cm, err := openai.NewChatModel(context.Background(), cfg)
	if err != nil {
		log.Fatalf("create chat model failed: %v\n", err)
	}
	return cm
}

// newDeepSeekModle builds a DeepSeek-backed chat model from the "deepseek.*"
// configuration keys; construction failure is fatal at startup.
func newDeepSeekModle() eModel.ToolCallingChatModel {
	log.Println("init deepseek model")
	cfg := &deepseek.ChatModelConfig{
		APIKey:      infra.GetS("deepseek.api_key", ""),
		Model:       infra.GetS("deepseek.model", ""),
		MaxTokens:   infra.GetI("deepseek.max_tokens", 0),
		Temperature: infra.GetF("deepseek.temperature", 0),
		TopP:        infra.GetF("deepseek.top_p", 0),
		BaseURL:     infra.GetS("deepseek.base_url", ""),
	}
	model, err := deepseek.NewChatModel(context.Background(), cfg)
	if err != nil {
		log.Fatal(err)
	}
	return model
}

// newQwenModle builds a Qwen-backed chat model from the "qwen.*" configuration
// keys; construction failure is fatal at startup.
func newQwenModle() eModel.ToolCallingChatModel {
	log.Println("init qwen model")
	cfg := &qwen.ChatModelConfig{
		BaseURL:     infra.GetS("qwen.base_url", ""),
		APIKey:      infra.GetS("qwen.api_key", ""),
		Model:       infra.GetS("qwen.model", ""),
		MaxTokens:   of(infra.GetI("qwen.max_tokens", 0)),
		Temperature: of(infra.GetF("qwen.temperature", 0)),
		TopP:        of(infra.GetF("qwen.top_p", 0)),
	}
	model, err := qwen.NewChatModel(context.Background(), cfg)
	if err != nil {
		log.Fatalf("NewChatModel of qwen failed, err=%v", err)
	}
	return model
}

// newArkModle builds an Ark (Volcengine)-backed chat model from the "ark.*"
// configuration keys; construction failure is fatal at startup, matching the
// other backend constructors.
func newArkModle() eModel.ToolCallingChatModel {
	log.Println("init ark model")
	ctx := context.Background()
	// Get ARK_API_KEY and ARK_MODEL_ID: https://www.volcengine.com/docs/82379/1399008
	chatModel, err := ark.NewChatModel(ctx, &ark.ChatModelConfig{
		APIKey: infra.GetS("ark.api_key", ""),
		Model:  infra.GetS("ark.model", ""),
	})
	if err != nil {
		// BUG FIX: this used to log.Printf and fall through, returning a nil
		// *ark.ChatModel. Stored into the interface-valued bigModel map that
		// becomes a non-nil interface wrapping a typed nil, which panics on
		// first Generate/Stream. Fail fast like the sibling constructors.
		log.Fatalf("NewChatModel failed, err=%v", err)
	}
	return chatModel
}

// func newArkBotModle() eModel.ToolCallingChatModel {
// 	log.Println("init ark bot model")
// 	ctx := context.Background()
// 	chatModel, err := arkbot.NewChatModel(ctx, &arkbot.Config{
// 		APIKey: infra.GetS("ark_bot.api_key", ""),
// 		Model:  infra.GetS("ark_bot.model", ""),
// 	})
// 	if err != nil {
// 		log.Printf("NewChatModel failed, err=%v", err)
// 	}
// 	return chatModel
// }

// Recv drains the underlying model stream, buffering chunks and flushing one
// concatenated ChatResponse to out each time a chunk carries a finish reason,
// plus a final flush for any trailing chunks. On normal completion it returns
// io.EOF (the sentinel the original implementation also returned — callers
// presumably treat it as "stream finished"); otherwise it returns the first
// receive/concat/send error.
func (ms *ModelStreamOut) Recv(out facade.ChatSendOut) error {
	defer ms.out.Close()

	var chunks []*schema.Message

	// flush concatenates the buffered chunks into one message and sends it.
	flush := func() error {
		if len(chunks) == 0 {
			return nil
		}
		msg, err := schema.ConcatMessages(chunks)
		if err != nil {
			// BUG FIX: was log.Fatalf, which terminated the whole process from
			// inside a library method; propagate the error to the caller instead.
			return fmt.Errorf("concat messages: %w", err)
		}
		chunks = chunks[:0]
		return out.Send(&facade.ChatResponse{Answer: msg.Content})
	}

	for {
		msg, err := ms.out.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		if msg == nil {
			continue
		}
		chunks = append(chunks, msg)
		// BUG FIX: ResponseMeta can be nil on intermediate chunks; guard before
		// dereferencing FinishReason.
		if msg.ResponseMeta != nil && msg.ResponseMeta.FinishReason != "" {
			log.Printf("FinishReason:%s\n", msg.ResponseMeta.FinishReason)
			if err := flush(); err != nil {
				return err
			}
		}
	}
	if err := flush(); err != nil {
		return err
	}
	return io.EOF
}

// of returns a pointer to a copy of v. It exists so optional config fields
// (e.g. *int, *float32) can be populated from plain values inline.
func of[T any](v T) *T {
	p := new(T)
	*p = v
	return p
}
