package openai

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"gitee.com/jn-qq/big-model/common"
	"io"
	"net/http"
	"strings"
)

// OpenAI is a client for OpenAI-compatible chat-completion endpoints.
// All configuration is applied through the chainable Set* methods; Chat
// performs the request and maintains multi-round dialogue history.
type OpenAI struct {
	modelInfo      common.Model               // endpoint URL, model identifier and API key
	streamEnable   bool                       // when true, request SSE streaming responses
	temperature    float64                    // sampling temperature sent as "temperature"
	topK           float64                    // sampling parameter sent as "top_k"; only sent when > 0
	maxTokens      int                        // upper bound on generated tokens ("max_tokens")
	historyMessage common.HistoryManager      // rolling multi-round dialogue history
	responseFormat string                     // response_format type, e.g. "json_object"; empty means omit
	tools          map[string]common.ToolFunction // registered function tools, keyed by function name
	toolId         string                     // pending tool_call id; non-empty means next message is a tool result
}

// SetTemperature 设置模型温度 0~1 值越高模型约自由发挥
// SetTemperature sets the model sampling temperature (0~1); higher values
// make the output more creative. Returns the receiver for chaining.
func (o *OpenAI) SetTemperature(temperature float64) *OpenAI {
	o.temperature = temperature
	return o
}

// SetTopK 设置模型采样温度 0~1 值越高模型越随机
// SetTopK sets the top_k sampling parameter (0~1 per original note); higher
// values make the output more random. Only sent to the API when > 0.
// Returns the receiver for chaining.
func (o *OpenAI) SetTopK(topK float64) *OpenAI {
	o.topK = topK
	return o
}

// SetMaxTokens 设置最大token数
// SetMaxTokens sets the maximum number of tokens the model may generate.
// Returns the receiver for chaining.
func (o *OpenAI) SetMaxTokens(maxTokens int) *OpenAI {
	o.maxTokens = maxTokens
	return o
}

// SetStream 设置是否流式返回
// SetStream enables or disables streaming (SSE) responses.
// Returns the receiver for chaining.
func (o *OpenAI) SetStream(enable bool) *OpenAI {
	o.streamEnable = enable
	return o
}

// SetMaxRounds 设置历史保存轮数
// SetMaxRounds sets how many dialogue rounds are retained in history.
// Returns the receiver for chaining.
func (o *OpenAI) SetMaxRounds(maxRounds int) *OpenAI {
	o.historyMessage.SetMaxRounds(maxRounds)
	return o
}

// SetApiKey 设置更新apikey
// SetApiKey sets or updates the API key used for the Authorization header.
// Returns the receiver for chaining.
func (o *OpenAI) SetApiKey(apiKey string) *OpenAI {
	o.modelInfo.ApiKey = apiKey
	return o
}

// SetResponseType 设置OpenAI实例的响应格式类型。
// SetResponseType sets the response_format type for this OpenAI instance
// (e.g. "json_object"); an empty value omits the field from the request.
// Returns the receiver for chaining.
func (o *OpenAI) SetResponseType(t string) *OpenAI {
	o.responseFormat = t
	return o
}

// SetFunTools 设置函数工具
// SetFunTools registers one or more function tools, keyed by their function
// name; a tool registered under an existing name replaces the previous one.
// Returns the receiver for chaining.
func (o *OpenAI) SetFunTools(funList ...common.ToolFunction) *OpenAI {
	if o.tools == nil {
		// Lazily allocate the registry on first use.
		o.tools = make(map[string]common.ToolFunction, len(funList))
	}
	for _, f := range funList {
		name := f.GetFunctionName()
		o.tools[name] = f
	}
	return o
}

// SetSystem appends a system-role prompt to the dialogue history.
// Returns the receiver for chaining.
func (o *OpenAI) SetSystem(prompt string) *OpenAI {
	msg := common.ChatMessage{
		Role:    "system",
		Content: prompt,
	}
	o.historyMessage.AddDialogue(msg)
	return o
}

// ResetDialog 重置对话
func (o *OpenAI) ResetDialog() {
	o.historyMessage.Clear()
}

// Chat 发送对话，获取结果
func (o *OpenAI) Chat(message string) (*common.Response, error) {
	// 组装请求参数
	payload, err := o.assembleRequestParams(message)
	if err != nil {
		return nil, err
	}

	// 构建请求链接
	req, err := http.NewRequest("POST", o.modelInfo.Url, bytes.NewBuffer(payload))
	if err != nil {
		return nil, fmt.Errorf("creating request failed: %w", err)
	}

	// 设置请求头
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+o.modelInfo.ApiKey)
	if o.streamEnable {
		req.Header.Set("Accept", "text/event-stream")
	}

	var response = new(common.Response)
	response.SetTime("start")

	// 发送请求
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("sending request failed: %w", err)
	}
	defer resp.Body.Close()

	// 处理响应
	// 在 SendChat 方法中增加错误包装
	if resp.StatusCode != http.StatusOK {
		_, err = io.ReadAll(resp.Body)
		return nil, fmt.Errorf("API request failed (status %d): %w", resp.StatusCode, err)
	}

	// 提取响应结果
	if o.streamEnable {
		// 流式读取处理
		reader := bufio.NewReader(resp.Body)
		for {
			line, err := reader.ReadString('\n')
			if err != nil {
				if err == io.EOF {
					break
				}
				return nil, fmt.Errorf("reading stream failed: %w", err)
			}

			line = strings.TrimSpace(line)
			if !strings.HasPrefix(line, "data:") {
				continue
			} else {
				line = strings.TrimSpace(line[5:])
			}
			response.SetTime("first")

			// 处理结束标记
			if strings.HasSuffix(line, "[DONE]") {
				break
			}

			// 解析 JSON 数据
			var chunk common.ChatResponse
			if err := json.Unmarshal([]byte(line), &chunk); err != nil {
				return nil, fmt.Errorf("unmarshal failed: %w", err)
			}

			// 统一化输出
			response.Update(chunk)
		}
	} else {
		// 解析 JSON 数据
		var chunk common.ChatResponse
		if err := json.NewDecoder(resp.Body).Decode(&chunk); err != nil {
			return nil, fmt.Errorf("decoding response failed: %w", err)
		}
		// 统一化输出
		response.Update(chunk)
	}

	// 缓存历史信息
	o.historyMessage.AddDialogue(
		common.ChatMessage{
			Role:      "assistant",
			Content:   response.GetAnswer(),
			ToolCalls: response.GetTools(),
		},
	)
	if response.GetTools() != nil {
		for _, tool := range response.GetTools() {
			toolAnswer := o.tools[tool.Function.Name].Execute(tool.Function.Arguments)
			o.toolId = tool.Id
			toolsResponse, err := o.Chat(toolAnswer)
			if err != nil {
				return nil, err
			}
			return toolsResponse, nil
		}
	}
	response.SetTime("end")

	return response, nil
}

// 请求参数结构体（根据实际 API 文档调整）
type chatRequest struct {
	Model    string               `json:"model"`
	Messages []common.ChatMessage `json:"messages"`
	// 以下参数根据实际需要添加
	MaxTokens      int               `json:"max_tokens,omitempty"`
	Temperature    float64           `json:"temperature,omitempty"`
	Stream         bool              `json:"stream,omitempty"`
	ResponseFormat map[string]string `json:"response_format,omitempty"`
	Tools          []common.Tool     `json:"tools,omitempty"`
	TopK           float64           `json:"top_k,omitempty"`
}

// 组装入参
func (o *OpenAI) assembleRequestParams(message string) ([]byte, error) {
	// 将模型公共参数复制
	var params = chatRequest{
		Model: o.modelInfo.ModelType,
		// 组装历史信息
		Messages:    o.historyMessage.GetDialogue(),
		MaxTokens:   o.maxTokens,
		Temperature: o.temperature,
		Stream:      o.streamEnable,
	}
	if o.responseFormat != "" {
		params.ResponseFormat = map[string]string{"type": o.responseFormat}
	}
	if o.topK > 0 {
		params.TopK = o.topK
	}

	if o.tools != nil && len(o.tools) > 0 {
		for _, tool := range o.tools {
			params.Tools = append(params.Tools, tool.GetFunctionObject())
		}
	}

	// 组装用户消息
	var dialogue common.ChatMessage
	if o.toolId != "" {
		dialogue = common.ChatMessage{Role: "tool", Content: message, ToolCallId: o.toolId}
		o.toolId = ""
	} else {
		dialogue = common.ChatMessage{Role: "user", Content: message}
	}
	params.Messages = append(params.Messages, dialogue)

	// 多轮对话保留历史信息
	o.historyMessage.AddDialogue(dialogue)

	// json序列化
	return json.Marshal(params)
}
