package openai

import (
	"chatgpt/helper/code"
	"context"
	"errors"
	"fmt"
	gogpt "github.com/sashabaranov/go-openai"
	"github.com/slclub/glog"
	"github.com/slclub/go-tips/json"
	"github.com/slclub/go-tips/stringbyte"
	"io"
	"net/http"
	"strings"
)

// ChatCompletion sends a non-streaming chat completion request to OpenAI.
// question.Text must be a JSON-encoded []gogpt.ChatCompletionMessage; model
// falls back to the configured default when question.Model is empty.
// Failures are reported through the returned ChatResponse.MsgCode — the
// result is never nil.
func (self *AppGPT) ChatCompletion(question *Request) *ChatResponse {
	ctx := context.Background()
	response := &ChatResponse{
		MsgCode: code.SUCCESS,
	}
	if question.Text == "" {
		response.MsgCode = code.DATA_NIL
		glog.Errorf("CHATGPT.ChatCompletion.REQ.TEXT is empty!")
		return response
	}

	cnf := self.Config()

	// question.Text is expected to be a JSON array of chat messages.
	// Previously the Unmarshal error was ignored, so a malformed payload
	// silently produced a request with zero messages; surface it instead.
	messages := []gogpt.ChatCompletionMessage{}
	if err := json.Unmarshal(stringbyte.StringToBytes(question.Text), &messages); err != nil || len(messages) == 0 {
		response.MsgCode = code.DATA_NIL
		glog.Errorf("CHATGPT.ChatCompletion.REQ.TEXT unmarshal failed or yielded no messages: %v", err)
		return response
	}
	if question.User == nil {
		// Default identity used when the caller does not attribute the request.
		question.User = &User{Role: "user", Name: "TD", UID: "TD-1"}
	}
	model := question.Model
	if model == "" {
		model = cnf.Chat.Model
	}
	glog.Infof("CHATGPT.ChatCompletion.REQ prompt is %v", messages)
	req := gogpt.ChatCompletionRequest{
		Model:            model,
		MaxTokens:        cnf.Chat.MaxTokens,
		TopP:             cnf.Chat.TopP,
		FrequencyPenalty: cnf.Chat.FrequencyPenalty,
		PresencePenalty:  cnf.Chat.PresencePenalty,
		N:                1,
		Messages:         messages,
		Temperature:      cnf.Chat.Temperature,
		User:             question.User.UID,
		//Stop:             question.Stop,
	}

	resp, err := self.Client().CreateChatCompletion(ctx, req)
	response.resp = resp
	if err != nil {
		glog.Errorf("CHATGPT.ChatCompletion.RESPONSE.ERROR %v, %v", err, req)
		response.MsgCode = code.DATA_CALL_ERROR
		return response
	}
	glog.Debug("CHATGPT.ChatCompletion.RESPONSE.OK", resp.Choices, model)
	return response
}

// ChatCompletionStream opens a streaming chat completion with OpenAI and
// returns the live stream (caller must Close it). question.Text must be a
// JSON-encoded []gogpt.ChatCompletionMessage. Returns nil on any failure;
// errors are logged rather than returned.
func (self *AppGPT) ChatCompletionStream(question *Request) *gogpt.ChatCompletionStream {
	ctx := context.Background()
	if question.Text == "" {
		glog.Errorf("CHATGPT.ChatCompletionStream.REQ.TEXT is empty!")
		return nil
	}

	cnf := self.Config()

	// Previously the Unmarshal error was ignored; a malformed payload
	// silently produced a request with zero messages. Fail fast instead.
	messages := []gogpt.ChatCompletionMessage{}
	if err := json.Unmarshal(stringbyte.StringToBytes(question.Text), &messages); err != nil || len(messages) == 0 {
		glog.Errorf("CHATGPT.ChatCompletionStream.REQ.TEXT unmarshal failed or yielded no messages: %v", err)
		return nil
	}
	if question.User == nil {
		// Default identity used when the caller does not attribute the request.
		question.User = &User{Role: "user", Name: "TD", UID: "TD-1"}
	}
	model := question.Model
	if model == "" {
		model = cnf.Chat.Model
	}
	glog.Infof("CHATGPT.ChatCompletionStream.REQ prompt is %v", messages)
	req := gogpt.ChatCompletionRequest{
		Model:            model,
		MaxTokens:        cnf.Chat.MaxTokens,
		TopP:             cnf.Chat.TopP,
		FrequencyPenalty: cnf.Chat.FrequencyPenalty,
		PresencePenalty:  cnf.Chat.PresencePenalty,
		N:                1,
		Messages:         messages,
		Temperature:      cnf.Chat.Temperature,
		User:             question.User.UID,
		//Stop:             question.Stop,
	}

	resp, err := self.Client().CreateChatCompletionStream(ctx, req)
	if err != nil {
		glog.Errorf("CHATGPT.ChatCompletionStream.RESPONSE.ERROR %v, %v", err, req)
		return nil
	}
	glog.Debug("CHATGPT.ChatCompletionStream.RESPONSE.OK", model)
	return resp
}

// ConvChat runs a single non-streaming conversation turn and flattens the
// raw OpenAI response into a Response (first choice's content plus total
// token usage). stop is a comma-separated list of stop sequences; empty
// means no stop sequences.
func (self *AppGPT) ConvChat(question string, model string, user *User, stop string) *Response {
	// strings.Split("", ",") yields [""] — an empty stop string must map to
	// no stop sequences at all (matches ConvChatStream's handling).
	var stops []string
	if stop != "" {
		stops = strings.Split(stop, ",")
	}
	// Use the receiver, not the package-level AppChatGPT, so the method
	// honors whichever AppGPT instance it was called on.
	result := self.ChatCompletion(&Request{Text: question, User: user, Model: model, Stop: stops})
	res := &Response{
		MsgCode: result.MsgCode,
		Text:    "",
		Tokens:  0,
	}
	if res.MsgCode != code.SUCCESS {
		return res
	}
	if len(result.resp.Choices) == 0 {
		return res
	}
	data := result.resp.Choices[0].Message
	res.Text = data.Content
	res.Tokens = result.resp.Usage.TotalTokens
	return res
}

// ConvChatStream runs a streaming conversation turn and writes each delta
// chunk to w as it arrives (SSE-style plain text). stop is a comma-separated
// list of stop sequences; empty means none. Errors terminate the stream and
// are logged, not reported to the client.
func (self *AppGPT) ConvChatStream(w http.ResponseWriter, question string, model string, user *User, stop string) {
	stoparr := []string{}
	if stop != "" {
		stoparr = strings.Split(stop, ",")
	}
	w.Header().Set("Content-Type", "text/event-stream; charset=utf-8")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")
	w.Header().Set("Keep-Alive", "timeout=5")
	w.Header().Set("Transfer-Encoding", "chunked")
	// Was: user was accepted but never forwarded; pass it through so the
	// upstream request is attributed to the caller. Also use the receiver
	// instead of the package-level AppChatGPT.
	stream := self.ChatCompletionStream(&Request{Text: question, Model: model, User: user, Stop: stoparr})
	if stream == nil {
		return
	}
	defer stream.Close()
	// Streaming only reaches the client promptly if each chunk is flushed;
	// without this the ResponseWriter may buffer the entire response.
	flusher, canFlush := w.(http.Flusher)
	for {
		response, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			glog.Debug("ChatStream finished")
			return
		}
		if err != nil {
			glog.Errorf("ChatStream error: %v\n", err)
			return
		}
		if len(response.Choices) == 0 {
			continue
		}
		fmt.Fprintf(w, "%v", response.Choices[0].Delta.Content)
		if canFlush {
			flusher.Flush()
		}
	}
}
