package user

import (
	"bytes"
	"encoding/json"
	"github.com/gin-gonic/gin"
	"gpt-api/framework/ginplus"
	"gpt-api/framework/log"
	"gpt-api/pkg/model"
	"io"
	"math/rand"
	"net/http"
	"sort"
	"strings"
	"time"
)

// payloads is the JSON request body posted to the OpenAI chat-completions
// endpoint (marshaled in newGptRequest).
type payloads struct {
	Model string `json:"model"`

	Messages []Messages `json:"messages"`
}

// Messages is a single chat message in the conversation. Within this file
// Role is set to "user" or "assistant"; Content is the message text.
type Messages struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// getRequestText 获取请求接口的文本，要做一些清洗
//func getRequestText(h string) string {
//	// 1.去除空格以及换行
//	requestText := strings.TrimSpace(h)
//	requestText = strings.Trim(h, "\n")
//
//	// 2.获取上下文，拼接在一起，如果字符长度超出4000，截取为4000。（GPT按字符长度算），达芬奇3最大为4068，也许后续为了适应要动态进行判断。
//	sessionText := h.service.GetUserSessionContext()
//	if sessionText != "" {
//		requestText = sessionText + "\n" + requestText
//	}
//	if len(requestText) >= 4000 {
//		requestText = requestText[:4000]
//	}
//
//	// 3.检查用户发送文本是否包含结束标点符号
//	punctuation := ",.;!?，。！？、…"
//	runeRequestText := []rune(requestText)
//	lastChar := string(runeRequestText[len(runeRequestText)-1:])
//	if strings.Index(punctuation, lastChar) < 0 {
//		requestText = requestText + "？" // 判断最后字符是否加了标点，没有的话加上句号，避免openai自动补齐引起混乱。
//	}
//
//	// 4.返回请求文本
//	return requestText
//}

// openAICompletionsResp mirrors the JSON response of the OpenAI
// chat-completions endpoint; doRequest unmarshals the raw body into it.
// Only Choices[0].Message.Content is consumed by callers in this file.
type openAICompletionsResp struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int    `json:"created"`
	Model   string `json:"model"`
	Usage   struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens     int `json:"total_tokens"`
	} `json:"usage"`
	Choices []struct {
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
		FinishReason interface{} `json:"finish_reason"`
		Index        int         `json:"index"`
	} `json:"choices"`
}

// random returns a pseudo-random int in the half-open range [min, max),
// drawn from the package-level math/rand source. A degenerate range
// (max <= min) returns min instead of letting rand.Intn panic on a
// non-positive argument.
func random(min, max int) int {
	if max <= min {
		return min
	}
	return rand.Intn(max-min) + min
}

// Tinfoil handles a chat request. It forwards the user's message to the
// OpenAI chat-completions API — including up to 10 stored context messages
// when an open_id is supplied — persists both sides of the conversation,
// rewrites brand names in the reply, and returns the reply text as JSON.
func (h *Handler) Tinfoil(c *gin.Context) error {
	var (
		req struct {
			Message string `json:"message" binding:"required"`
			OpenId  string `json:"open_id"`
		}
		info model.Message
		resp []model.Message
		ms   []Messages
		mss  []Messages
	)

	if err := c.Bind(&req); err != nil {
		c.JSON(http.StatusOK, gin.H{
			"message": "pong",
		})
		// Bug fix: the original fell through and kept handling a zero-value
		// request after a bind failure; stop once the response is written.
		return nil
	}

	// Anonymous caller (no open_id): stateless single-turn request, nothing
	// is persisted.
	if req.OpenId == "" {
		ms = append(ms, Messages{
			Content: req.Message,
			Role:    "user",
		})
		sf := h.DoGPTRequests(ms)

		// Re-brand provider names in the reply text.
		f := strings.ReplaceAll(sf, "OpenAI", "逐星")
		pline := strings.ReplaceAll(f, "ChatGPT", "逐星GPT")
		return ginplus.ReplyJSON(c, gin.H{
			"text": pline,
		})
	}

	info = model.Message{
		Content:    req.Message,
		OpenId:     req.OpenId,
		CreateTime: time.Now().Unix(),
		Role:       "user",
	}

	// Store the user's message, then load the 10 most recent messages for
	// this open_id as conversation context.
	err := h.db.Model(&model.Message{}).Create(&info).Error
	if err != nil {
		return ginplus.ReplyInternalError(c, err)
	}
	err = h.db.Model(&model.Message{}).Where("open_id = ?", req.OpenId).Order("create_time desc").Limit(10).Find(&resp).Error
	if err != nil {
		return ginplus.ReplyInternalError(c, err)
	}

	// Restore chronological order — the query above returned newest-first.
	sort.Slice(resp, func(i, j int) bool {
		return resp[i].ID < resp[j].ID
	})
	var str, sf string
	for _, v := range resp {
		str = str + v.Content
		ms = append(ms, Messages{
			Content: v.Content,
			Role:    v.Role,
		})
	}
	log.Info("ms", log.Var("ms", ms))

	// When the accumulated context exceeds 100 bytes, send only the current
	// message; otherwise send the full history.
	// NOTE(review): len(str) counts bytes, not runes — for Chinese text 100
	// bytes is ~33 characters; confirm this cutoff is intended.
	if len(str) > 100 {
		mss = append(mss, Messages{
			Content: req.Message,
			Role:    "user",
		})
		sf = h.DoGPTRequest(mss, req.OpenId)
	} else {
		sf = h.DoGPTRequest(ms, req.OpenId)
	}

	// Re-brand provider names in the reply text.
	f := strings.ReplaceAll(sf, "OpenAI", "逐星")
	pline := strings.ReplaceAll(f, "ChatGPT", "逐星GPT")
	return ginplus.ReplyJSON(c, gin.H{
		"text": pline,
	})
}

// token is a pool of OpenAI API keys; newGptRequest picks one at random per
// request to spread load across accounts.
// WARNING(review): hardcoded secrets committed to source control must be
// treated as compromised — rotate these keys and load them from
// configuration or the environment instead.
var token = map[int]string{
	1:  "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",
	2:  "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",
	3:  "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",
	4:  "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",
	5:  "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",
	6:  "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",
	7:  "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",
	8:  "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",
	9:  "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",
	10: "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",
	11: "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",
}

// init caps the shared default HTTP client's timeout just under 5 seconds.
// NOTE(review): doRequest below builds its own http.Client with a separate
// timeout, so this setting only affects code elsewhere that uses
// http.DefaultClient directly — confirm it is still needed.
func init() {
	http.DefaultClient.Timeout = 4900 * time.Millisecond
}

// DoGPTRequest sends the given messages to the chat-completions API, stores
// the assistant's reply under open-id op, and returns the reply text. On a
// transport/decode error the error text itself is returned; when the API
// yields no choices a fixed "server busy" message is returned instead.
func (h *Handler) DoGPTRequest(d []Messages, op string) string {
	resp, err := doRequest(newGptRequest(d))
	if err != nil {
		return err.Error()
	}
	if len(resp.Choices) == 0 {
		return "服务器爆满，请稍后重试"
	}

	answer := strings.TrimSpace(resp.Choices[0].Message.Content)

	// Persist the assistant's side of the conversation; a storage failure is
	// logged but does not block returning the reply to the caller.
	record := model.Message{
		Content:    answer,
		OpenId:     op,
		CreateTime: time.Now().Unix(),
		Role:       "assistant",
	}
	if dbErr := h.db.Model(&model.Message{}).Create(&record).Error; dbErr != nil {
		log.Error("err", log.Var("err", dbErr))
	}

	return answer
}

// newGptRequest builds an authenticated POST request for the OpenAI
// chat-completions endpoint, carrying the given message list as a
// gpt-3.5-turbo payload. The bearer token is chosen at random from the
// package-level token pool.
func newGptRequest(d []Messages) *http.Request {
	p := payloads{
		Model:    "gpt-3.5-turbo",
		Messages: d,
	}
	bs, err := json.Marshal(p)
	if err != nil {
		log.Info("err", log.Var("err", err))
	}
	request, err := http.NewRequest(http.MethodPost, "https://api.openai.com/v1/chat/completions", bytes.NewReader(bs))
	if err != nil {
		log.Info("err", log.Var("err", err))
	}

	// Bug fixes: the old code seeded with second precision, so every request
	// within the same second drew the same key, and it called random(1, 11),
	// which returns [1, 11) and therefore never selected token[11]. Seed with
	// nanoseconds and draw from the full key range [1, len(token)].
	rand.Seed(time.Now().UnixNano())
	idx := random(1, len(token)+1)
	ts := token[idx]
	// Log only a short key prefix — writing full API keys to logs leaks
	// secrets into log storage.
	if len(ts) > 12 {
		log.Info("---------token", log.Var("token", ts[:12]+"..."))
	}
	request.Header.Set("Content-Type", "application/json")
	request.Header.Set("Authorization", "Bearer "+ts)
	return request
}

// doRequest executes req with a dedicated client and decodes the JSON body
// into an openAICompletionsResp. It returns the decoded response, or an
// error when the transport, body read, or JSON decode fails.
func doRequest(req *http.Request) (*openAICompletionsResp, error) {
	client := http.Client{
		// NOTE(review): 1500 seconds is 25 minutes — this looks like it was
		// meant to be milliseconds; confirm the intended timeout.
		Timeout: 1500 * time.Second,
	}
	resp, err := client.Do(req)
	if err != nil {
		log.Info("err", log.Var("err", err))
		return nil, err
	}
	// Bug fix: the original never closed the response body, leaking the
	// underlying connection and preventing transport reuse.
	defer resp.Body.Close()

	bs, err := io.ReadAll(resp.Body)
	log.Info("bs", log.Var("bs", string(bs)))
	if err != nil {
		log.Info("err", log.Var("err", err))
		return nil, err
	}
	var s openAICompletionsResp
	if err := json.Unmarshal(bs, &s); err != nil {
		log.Info("err", log.Var("err", err))
		return nil, err
	}
	return &s, nil
}

// DoGPTRequests sends the given messages to the chat-completions API and
// returns the reply text without persisting anything. A transport/decode
// error is returned as the message text; an empty choice list yields a
// fixed "server busy" message.
func (h *Handler) DoGPTRequests(d []Messages) string {
	resp, err := doRequest(newGptRequest(d))
	if err != nil {
		return err.Error()
	}
	if len(resp.Choices) == 0 {
		return "服务器爆满，请稍后重试"
	}
	return strings.TrimSpace(resp.Choices[0].Message.Content)
}
