package openai

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"

	"quipus/internal/llm"

	mark3labs "github.com/mark3labs/mcp-go/mcp"
	oai "github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
	"github.com/openai/openai-go/shared"
)

// OpenAiClient is an llm provider adapter backed by an
// OpenAI-compatible chat-completion / embedding HTTP API.
type OpenAiClient struct {
	client *oai.Client
	model  string // default model, used when a request does not specify one
}

// OpenAiAuth is the JSON shape expected in LLMClientConfig.AuthInfo,
// e.g. {"secret_key": "sk-..."}.
type OpenAiAuth struct {
	SecretKey string `json:"secret_key"`
}

// NewClient builds an OpenAiClient from the given configuration.
// config.AuthInfo must be a JSON document matching OpenAiAuth;
// config.BaseUrl and config.Model set the endpoint and the default model.
func NewClient(config *llm.LLMClientConfig) (*OpenAiClient, error) {
	var authInfo OpenAiAuth
	if err := json.Unmarshal([]byte(config.AuthInfo), &authInfo); err != nil {
		return nil, fmt.Errorf("parsing openai auth info: %w", err)
	}

	client := oai.NewClient(
		option.WithAPIKey(authInfo.SecretKey),
		option.WithBaseURL(config.BaseUrl),
	)

	return &OpenAiClient{
		client: client,
		model:  config.Model,
	}, nil
}

// Chat runs a streaming chat completion and forwards each content delta
// on req.RspChan. It returns nil when the stream completes or when ctx
// is cancelled, and the stream error otherwise. Messages with roles
// other than user/system are silently skipped.
func (c *OpenAiClient) Chat(ctx context.Context, req *llm.ChatReq) error {
	if req == nil {
		return nil
	}

	if len(req.Msgs) < 1 {
		return errors.New("message is null")
	}

	// Per-request model overrides the client default.
	model := req.Model
	if len(model) < 1 {
		model = c.model
	}

	messages := make([]oai.ChatCompletionMessageParamUnion, 0, len(req.Msgs))
	for _, m := range req.Msgs {
		switch m.Role {
		case llm.RoleUser:
			messages = append(messages, oai.UserMessage(m.Content))
		case llm.RoleSys:
			messages = append(messages, oai.SystemMessage(m.Content))
		}
	}

	stream := c.client.Chat.Completions.NewStreaming(ctx, oai.ChatCompletionNewParams{
		Messages: oai.F(messages),
		Seed:     oai.Int(0),
		// BUG FIX: was oai.F(c.model), which silently ignored req.Model.
		Model: oai.F(model),
	})

	for stream.Next() {
		// Debug print of Choices[0] removed: it panicked on events with
		// an empty Choices slice and leaked output to stdout.
		evt := stream.Current()
		for _, choice := range evt.Choices {
			select {
			case req.RspChan <- choice.Delta.Content:
			case <-ctx.Done():
				slog.Info("open ai recv quit signal ")
				return nil
			}
		}
	}

	return stream.Err()
}

// GetProviderName reports the provider identifier for this client.
func (c *OpenAiClient) GetProviderName() string {
	const providerName = "openai"
	return providerName
}

// Embedding computes a 1024-dimension embedding vector for req.Prompt.
// The per-request model overrides the client default; an error is
// returned when neither is set, when the API call fails, or when the
// response carries no embedding data.
func (c *OpenAiClient) Embedding(ctx context.Context, req *llm.EmbeddingReq) ([]float64, error) {
	if req == nil {
		return nil, errors.New("request is null")
	}

	model := req.Model
	if len(model) < 1 {
		model = c.model
		if len(model) < 1 {
			return nil, errors.New("model is null")
		}
	}

	response, err := c.client.Embeddings.New(ctx, oai.EmbeddingNewParams{
		Input: oai.F[oai.EmbeddingNewParamsInputUnion](shared.UnionString(req.Prompt)),
		// BUG FIX: was oai.F(c.model), which ignored the resolved model
		// (and could send an empty model the guard above rejects).
		Model:      oai.F(model),
		Dimensions: oai.Int(1024),
	})
	if err != nil {
		return nil, err
	}
	if len(response.Data) == 0 {
		// Guard the index below; an empty Data slice previously panicked.
		return nil, errors.New("empty embedding response")
	}

	return response.Data[0].Embedding, nil
}

// Complete performs one non-streaming chat completion and returns the
// first choice wrapped as an llm.Message. When req.Tools is non-empty
// the tools are advertised to the model, so the returned message may
// carry tool calls (see OpenAIMessage.GetToolCalls).
func (c *OpenAiClient) Complete(ctx context.Context, req *llm.CompleteReq) (llm.Message, error) {
	if req == nil {
		return nil, nil
	}

	if len(req.Msgs) < 1 {
		return nil, errors.New("message is null")
	}

	// Per-request model overrides the client default.
	model := req.Model
	if len(model) < 1 {
		model = c.model
	}

	// Build typed message params (same helpers Chat uses) instead of
	// casting roles into oai.ChatCompletionMessage; the old loop also
	// appended a zero-value message for unknown roles — now skipped.
	messages := make([]oai.ChatCompletionMessageParamUnion, 0, len(req.Msgs))
	for _, m := range req.Msgs {
		switch m.Role {
		case llm.RoleUser:
			messages = append(messages, oai.UserMessage(m.Content))
		case llm.RoleSys:
			messages = append(messages, oai.SystemMessage(m.Content))
		}
	}

	params := oai.ChatCompletionNewParams{
		Messages: oai.F(messages),
		// BUG FIX: was oai.F(c.model), which silently ignored req.Model.
		Model: oai.F(model),
		Seed:  oai.Int(0),
	}

	if len(req.Tools) > 0 {
		params.Tools = oai.F(convertTool(req.Tools))
	}

	completion, err := c.client.Chat.Completions.New(ctx, params)
	if err != nil {
		return nil, err
	}
	if len(completion.Choices) == 0 {
		return nil, errors.New("empty completion response")
	}

	// NOTE(review): the previous version issued a second, identical API
	// call after appending the assistant reply to the messages — invalid
	// for tool-call replies (no tool results were appended) and a wasted
	// round-trip otherwise. The caller is responsible for executing tool
	// calls and continuing the conversation.
	return &OpenAIMessage{Resp: completion, Choice: &completion.Choices[0]}, nil
}

// convertTool maps mcp-go tool definitions onto the OpenAI
// function-calling tool parameter format.
// (adapted from the mark3labs mcp host)
func convertTool(tools []mark3labs.Tool) []oai.ChatCompletionToolParam {
	var converted []oai.ChatCompletionToolParam
	for _, tool := range tools {
		fn := oai.FunctionDefinitionParam{
			Name:        oai.String(tool.Name),
			Description: oai.String(tool.Description),
			Parameters:  oai.F(convertSchema(tool.InputSchema)),
		}
		converted = append(converted, oai.ChatCompletionToolParam{
			Type:     oai.F(oai.ChatCompletionToolTypeFunction),
			Function: oai.F(fn),
		})
	}
	return converted
}

// convertSchema translates an mcp-go tool input schema into the loose
// JSON-schema map the OpenAI function definition expects.
func convertSchema(schema mark3labs.ToolInputSchema) oai.FunctionParameters {
	params := oai.FunctionParameters{
		"type":       schema.Type,
		"properties": schema.Properties,
	}
	// "required" must serialize as an array, never as JSON null.
	if schema.Required == nil {
		params["required"] = []string{}
	} else {
		params["required"] = schema.Required
	}
	return params
}

// OpenAIMessage adapts one chat-completion choice (plus the full
// response, kept for usage accounting) to the llm.Message interface.
type OpenAIMessage struct {
	Resp   *oai.ChatCompletion
	Choice *oai.ChatCompletionChoice
}

// GetRole returns the role of the wrapped assistant message.
func (m *OpenAIMessage) GetRole() string {
	role := m.Choice.Message.Role
	return string(role)
}

// GetContent returns the text content of the wrapped assistant message.
func (m *OpenAIMessage) GetContent() string {
	msg := m.Choice.Message
	return msg.Content
}

// GetToolCalls wraps each tool call on the message as an llm.ToolCall.
// A nil slice is returned when the message carries no tool calls.
func (m *OpenAIMessage) GetToolCalls() []llm.ToolCall {
	toolCalls := m.Choice.Message.ToolCalls
	var wrapped []llm.ToolCall
	for i := range toolCalls {
		wrapped = append(wrapped, &ToolCallWrapper{toolCalls[i]})
	}
	return wrapped
}

// IsToolResponse reports whether the message requests any tool calls.
func (m *OpenAIMessage) IsToolResponse() bool {
	// len() is defined on nil slices, so the former `!= nil &&` prefix
	// was redundant (staticcheck S1009).
	return len(m.Choice.Message.ToolCalls) > 0
}

// GetToolResponseID returns the ID of the first tool call on the
// message, or "" when there are none (the old version panicked with an
// index-out-of-range on messages without tool calls).
func (m *OpenAIMessage) GetToolResponseID() string {
	calls := m.Choice.Message.ToolCalls
	if len(calls) == 0 {
		return ""
	}
	return calls[0].ID
}

// GetUsage returns the token counts reported by the API, in the order
// (completion tokens, prompt tokens).
func (m *OpenAIMessage) GetUsage() (completionTokens int, promptTokens int) {
	usage := m.Resp.Usage
	completionTokens = int(usage.CompletionTokens)
	promptTokens = int(usage.PromptTokens)
	return completionTokens, promptTokens
}

// ToolCallWrapper adapts a single OpenAI tool call to the llm.ToolCall
// interface.
type ToolCallWrapper struct {
	Call oai.ChatCompletionMessageToolCall
}

// GetID returns the tool call's unique identifier.
func (t *ToolCallWrapper) GetID() string {
	id := t.Call.ID
	return id
}

// GetName returns the name of the function the tool call targets.
func (t *ToolCallWrapper) GetName() string {
	fn := t.Call.Function
	return fn.Name
}

// GetArguments decodes the tool call's JSON argument string into a map.
// An empty map is returned when the payload is not valid JSON.
func (t *ToolCallWrapper) GetArguments() map[string]interface{} {
	raw := []byte(t.Call.Function.Arguments)
	var args map[string]interface{}
	if json.Unmarshal(raw, &args) != nil {
		return make(map[string]interface{})
	}
	return args
}
