package v1

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"flag"
	"fmt"
	"gin-demo/pkg/logging"
	"gin-demo/pkg/setting"
	"gin-demo/pkg/util"
	"github.com/gin-gonic/gin"
	"github.com/sashabaranov/go-openai"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"net/url"
	"time"
)

// This file integrates with the ChatGPT 3.5 chat-completion API.
var (
	// msgFlag2 receives the -msg2 command-line flag value.
	// NOTE(review): it is never read anywhere in this file — original author's
	// comment also said its purpose was unknown; candidate for removal.
	msgFlag2 string
)

// 初始化
// init registers the -msg2 command-line flag that populates msgFlag2.
func init() {
	flag.StringVar(&msgFlag2, "msg2", "", "文本消息")
}

/**
介绍：
role  	   含义
system     表示可以设置机器人的人设
assistant  表示机器人回复内容
user	   表示用户提问问题
*/

// Chat35Request is the request payload sent to the chat-completion endpoint.
// Parameter semantics: https://platform.openai.com/docs/api-reference/chat/create
type Chat35Request struct {
	Model            string                   `json:"model"`
	Messages         []map[string]interface{} `json:"messages"` // conversation messages to send (role/content maps)
	MaxTokens        int                      `json:"max_tokens"`
	FrequencyPenalty float32                  `json:"frequency_penalty"`
	Temperature      float32                  `json:"temperature"`
	PresencePenalty  float32                  `json:"presence_penalty"`
	Stream           bool                     `json:"stream"` // whether the API should stream the response
}

// 初始化请求结构体
// newChat35Request returns a Chat35Request preloaded with the default
// gpt-3.5-turbo parameters; callers fill in Messages (and Stream) themselves.
func newChat35Request() *Chat35Request {
	req := &Chat35Request{}
	req.Model = "gpt-3.5-turbo"
	req.MaxTokens = 3000
	req.Temperature = 1.0
	req.FrequencyPenalty = 0
	req.PresencePenalty = 0
	req.Stream = false
	return req
}

// 响应
// Chat35Response mirrors the JSON body returned by the (non-streaming)
// chat-completion endpoint.
type Chat35Response struct {
	Id      string `json:"id"`
	Object  string `json:"object"`
	Created int    `json:"created"`
	Model   string `json:"model"`
	// Choices holds the generated completions; Message.Content is the reply text.
	Choices []struct {
		Index   int `json:"index"`
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
	// Usage reports token accounting for the prompt and completion.
	Usage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}

// ChatGPT35Request handles a non-streaming chat-completion request. It reads
// the prompt from the "message" query parameter and an optional system prompt
// from "system_content", posts them to the configured 3.5 endpoint, and
// returns the parsed Chat35Response as JSON. All failures are reported to the
// client with code 422 in the standard response envelope.
func ChatGPT35Request(c *gin.Context) {
	// Dump the active configuration for troubleshooting.
	log.Println("chatgpt3_5的配置：", setting.ChatGPT35Conf)

	message := c.Query("message")
	systemContent := c.DefaultQuery("system_content", "")

	returnObj := util.NewResponseSuccess()

	if message == "" {
		returnObj.Code = 422
		returnObj.Msg = "message参数不能为空"
		util.ResponseSuccess(c, returnObj)
		return
	}

	// NOTE(review): the rest of this handler uses ChatGPT35Conf; it previously
	// validated and signed with ChatGPT3Conf, mixing two configurations.
	if setting.ChatGPT35Conf.API_KEY == "" {
		returnObj.Code = 422
		returnObj.Msg = "apikey不能为空"
		util.ResponseSuccess(c, returnObj)
		return
	}

	// Build the payload: optional system message first, then the user message.
	reqParams := newChat35Request()
	if systemContent != "" {
		reqParams.Messages = append(reqParams.Messages, map[string]interface{}{
			"role":    "system",
			"content": systemContent,
		})
	}
	reqParams.Messages = append(reqParams.Messages, map[string]interface{}{
		"role":    "user",
		"content": message,
	})

	// This handler always uses the blocking (non-stream) API.
	reqParams.Stream = false

	reqJson, err := json.Marshal(&reqParams)
	if err != nil {
		returnObj.Code = 422
		returnObj.Msg = "请求体转成json失败：" + err.Error()
		util.ResponseSuccess(c, returnObj)
		return
	}
	logging.Info("请求参数:", string(reqJson))

	req, err := http.NewRequest("POST", setting.ChatGPT35Conf.END_POINT, bytes.NewReader(reqJson))
	if err != nil {
		// BUG FIX: previously returned silently, leaving the client with no
		// response body at all.
		returnObj.Code = 422
		returnObj.Msg = "http.NewRequest失败：" + err.Error()
		util.ResponseSuccess(c, returnObj)
		return
	}
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Authorization", "Bearer "+setting.ChatGPT35Conf.API_KEY)
	req.Header.Add("User-Agent", "chatGPT/1 CFNetwork/1402.0.8 Darwin/22.2.0")
	// BUG FIX: do not set Accept-Encoding by hand. When it is set explicitly,
	// Go's transport will NOT transparently decompress the response, so a
	// gzip/br-compressed body would reach json.Unmarshal as raw bytes.
	req.Header.Add("Accept-language", "zh-CN,zh-Hans;q=0.9")

	client := &http.Client{
		Timeout: 300 * time.Second,
	}
	// Route through the configured HTTP proxy, if any.
	if setting.ChatGPT35Conf.PROXY != "" {
		proxyAddress, errProxy := url.Parse(setting.ChatGPT35Conf.PROXY)
		if errProxy != nil {
			// BUG FIX: the parse error was previously discarded with _.
			returnObj.Code = 422
			returnObj.Msg = "代理地址解析失败：" + errProxy.Error()
			util.ResponseSuccess(c, returnObj)
			return
		}
		client.Transport = &http.Transport{
			Proxy: http.ProxyURL(proxyAddress),
		}
	}

	resp, err2 := client.Do(req)
	if err2 != nil {
		returnObj.Code = 422
		returnObj.Msg = "client.Do失败：" + err2.Error()
		util.ResponseSuccess(c, returnObj)
		return
	}
	// BUG FIX: close the body on every exit path (it was previously deferred
	// only after a successful ReadAll, leaking it on the error path).
	defer resp.Body.Close()

	readRet, err3 := ioutil.ReadAll(resp.Body)
	if err3 != nil {
		returnObj.Code = 422
		returnObj.Msg = "ioutil.ReadAll失败：" + err3.Error()
		util.ResponseSuccess(c, returnObj)
		return
	}
	// Log the raw upstream response.
	log.Printf("result:%s", readRet)
	logging.Info("返回值：", string(readRet))

	chatResp := &Chat35Response{}
	err = json.Unmarshal(readRet, &chatResp)
	if err != nil {
		returnObj.Code = 422
		// BUG FIX: this branch previously called err2.Error(), but err2 is nil
		// here — any unmarshal failure would have panicked.
		returnObj.Msg = "json.Unmarshal失败：" + err.Error()
		util.ResponseSuccess(c, returnObj)
		return
	}

	// Success: hand the parsed response back to the client.
	returnObj.Data = chatResp
	util.ResponseSuccess(c, returnObj)
}

// MessageInput is the JSON payload carried inside the "message" query
// parameter of ChatGPT35StreamRequest.
type MessageInput struct {
	Message       string `json:"message"`        // user question (required)
	SystemContent string `json:"system_content"` // optional system prompt
}

// 用stream的方式
// ChatGPT35StreamRequest handles a streaming chat-completion request. The
// "message" query parameter carries a JSON-encoded MessageInput; the upstream
// reply is forwarded to the client as server-sent events: one "message" event
// per chunk (the raw choice JSON), terminated by a "stop" event whose payload
// is "finish" on normal completion or "error" on upstream failure.
func ChatGPT35StreamRequest(c *gin.Context) {
	message := c.Query("message") // JSON-encoded MessageInput

	returnObj := util.NewResponseSuccess()

	msgInput := &MessageInput{}
	if errInput := json.Unmarshal([]byte(message), msgInput); errInput != nil {
		returnObj.Code = 422
		returnObj.Msg = "请求参数发生了错误"
		util.ResponseSuccess(c, returnObj)
		return
	}
	if msgInput.Message == "" {
		returnObj.Code = 422
		returnObj.Msg = "message参数不能为空"
		util.ResponseSuccess(c, returnObj)
		return
	}

	// NOTE(review): the rest of this handler reads ChatGPT35Conf; it previously
	// validated ChatGPT3Conf.API_KEY, i.e. a different configuration.
	if setting.ChatGPT35Conf.API_KEY == "" {
		returnObj.Code = 422
		returnObj.Msg = "apikey不能为空"
		util.ResponseSuccess(c, returnObj)
		return
	}

	// Assemble the conversation: optional system prompt, then the user message.
	chatMessage := make([]openai.ChatCompletionMessage, 0, 2)
	reqParams := newChat35Request()
	if msgInput.SystemContent != "" {
		chatMessage = append(chatMessage, openai.ChatCompletionMessage{
			Role:    "system",
			Content: msgInput.SystemContent,
		})
	}
	chatMessage = append(chatMessage, openai.ChatCompletionMessage{
		Role:    "user",
		Content: msgInput.Message,
	})

	// This handler always streams the response.
	reqParams.Stream = true

	// Build the client configuration once. BUG FIX: the proxy branch previously
	// rebuilt the config from scratch and silently dropped the custom BaseURL.
	defaultConfig := openai.DefaultConfig(setting.ChatGPT35Conf.API_KEY)
	defaultConfig.BaseURL = setting.ChatGPT35Conf.END_POINT
	fmt.Printf("defaultConfig.BaseUrl: %s\n", defaultConfig.BaseURL)

	if setting.ChatGPT35Conf.PROXY != "" {
		proxyUrl, err := url.Parse(setting.ChatGPT35Conf.PROXY)
		if err != nil {
			// BUG FIX: a malformed proxy URL previously panicked, crashing the
			// whole process from inside a request handler.
			returnObj.Code = 422
			returnObj.Msg = "代理地址解析失败：" + err.Error()
			util.ResponseSuccess(c, returnObj)
			return
		}
		defaultConfig.HTTPClient = &http.Client{
			Transport: &http.Transport{
				Proxy: http.ProxyURL(proxyUrl),
			},
		}
	}
	clientObj := openai.NewClientWithConfig(defaultConfig)

	// Derive the upstream context from the incoming request so the OpenAI
	// stream is cancelled when the client disconnects. BUG FIX: the detached
	// context.Background() used before could leak the reader goroutine.
	ctx, cancel := context.WithCancel(c.Request.Context())
	defer cancel()

	req := openai.ChatCompletionRequest{
		Model:            reqParams.Model,
		MaxTokens:        reqParams.MaxTokens,
		Temperature:      reqParams.Temperature,
		FrequencyPenalty: reqParams.FrequencyPenalty,
		PresencePenalty:  reqParams.PresencePenalty,
		Stream:           reqParams.Stream,
		Messages:         chatMessage,
	}

	reqJson, _ := json.Marshal(req)
	log.Printf("请求参数：%s", string(reqJson))
	logging.Info("请求参数：" + string(reqJson))

	stream, err := clientObj.CreateChatCompletionStream(ctx, req)
	if err != nil {
		returnObj.Code = 422
		returnObj.Msg = "CreateChatCompletionStream发生了错误：" + err.Error()
		util.ResponseSuccess(c, returnObj)
		return
	}

	// The reader goroutine below owns the stream and is solely responsible for
	// closing it. BUG FIX: it was previously closed both here and there.
	chanStream := make(chan string, 100)
	go func() {
		defer stream.Close()
		defer close(chanStream)

		// send delivers one chunk, giving up when the request context is gone
		// so this goroutine can never block forever on a full channel.
		send := func(msg string) bool {
			select {
			case chanStream <- msg:
				return true
			case <-ctx.Done():
				return false
			}
		}

		for {
			response, err := stream.Recv()
			// Normal end of stream.
			if errors.Is(err, io.EOF) {
				fmt.Println("Stream finished")
				send("<!finish>")
				return
			}
			// Upstream error: ask the SSE writer to emit a "stop"/"error" event.
			if err != nil {
				fmt.Printf("Stream error: %v\n", err)
				send("<!error>")
				return
			}
			// An empty choices slice also signals the end of the stream.
			if len(response.Choices) == 0 {
				fmt.Println("Stream finished")
				send("<!finish>")
				return
			}
			data, _ := json.Marshal(response.Choices[0])
			if !send(string(data)) {
				return
			}

			fmt.Println("content:" + response.Choices[0].Delta.Content)
			logging.Info("返回信息：" + string(data))
		}
	}()

	// Forward chunks from the reader goroutine to the client as SSE events.
	c.Stream(func(w io.Writer) bool {
		msg, ok := <-chanStream
		if !ok {
			return false
		}
		switch msg {
		case "<!finish>":
			fmt.Println("message: finish mark")
			c.SSEvent("stop", "finish")
			return false
		case "<!error>":
			fmt.Println("message: error mark")
			c.SSEvent("stop", "error")
			return false
		default:
			c.SSEvent("message", msg)
			return true
		}
	})
}
