package process

import (
	"context"
	"encoding/json"
	"log/slog"
	"path/filepath"
	"quipus/internal/database/model"
	"quipus/internal/llm"
	"quipus/internal/llm/tool"

	"quipus/internal/llm/factory"
	llmMaker "quipus/internal/llm/factory"
	"quipus/internal/rag/rag"
	"quipus/internal/utils"
	v1pb "quipus/proto/gen/api/v1"
	apb "quipus/proto/gen/assist"
	cpb "quipus/proto/gen/common"
	"strings"
	"time"

	"github.com/lithammer/shortuuid/v4"
	"github.com/spf13/viper"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// LLMWorkflow is one node of a workflow DAG. A node gathers inputs, optionally
// runs an LLM assistant (possibly rag-augmented) and forwards its output to
// the nodes in WorkChain.
type LLMWorkflow struct {
	Wkp             *LLMWorkflowProcess // owning process
	Name            string
	Inputs          []string // pending inputs for the next run; cleared between turns
	Resources       []string
	InputLen        int32 // number of inputs a GATHER_WORK node waits for before running
	ChatClient      llm.LLM_Chat
	SystemPrompt    string
	Rgb             *rag.RagBuilder // non-nil when the node is rag-enabled
	WorkChain       []*LLMWorkflow  // downstream nodes fed by this node's output
	WorkType        v1pb.WorkType
	AssistantId     int32 // 0 means a pass-through node with no assistant
	AssistantName   string
	AssistantAvatar string
	Pos             v1pb.FlowPos // FLOW_TAIL nodes stream chat output to the client
	GranaryScope    v1pb.GranaryScope
	Docs            []string // rag document ids queried by Rgb
	Messages        []llm.RoleContent // chat history for this node
	ToolPool        *tool.ToolPool
}

// LLMWorkflowWrapper pairs a workflow node with the per-turn context used for
// a single streaming interaction.
type LLMWorkflowWrapper struct {
	*LLMWorkflow
	Ctx    context.Context
	Cancel context.CancelFunc
}

// LLMWorkflowProcess is the per-subject execution state: the resolved workflow
// tree, the effective rag mode, and the identity used when emitting exchanges.
type LLMWorkflowProcess struct {
	*Process
	Workflow   *LLMWorkflow
	RagMode    cpb.RagMode
	UserId     int32
	UserAvatar string
	UserName   string
	Granary    *model.Granary // first granary resolved from the subject; used for doc ingestion
}

func (p *Process) NewWorkflowProcess(ctx context.Context, subject *model.Subject, responders []int32) (LLM_Process, error) {
	emid := int32(0)
	var docs []string
	ragMode := cpb.RagMode(cpb.RagMode_value[subject.RagMode])
	userId := responders[0]
	wkp := &LLMWorkflowProcess{
		Process: p,
		RagMode: ragMode,
		UserId:  userId,
	}

	if subject != nil && subject.Granaries != "{}" {
		var granaries []string

		e := json.Unmarshal([]byte(subject.Granaries), &granaries)
		if e != nil {
			slog.Error("Failed to unmarshal docs", slog.Any("error", e))
			return nil, e
		}

		var defaultGra *model.Granary = nil
		for _, g := range granaries {

			gid, err := utils.ExtractIDFromUrn(utils.GranaryUrnPrefix, g)
			if err != nil {
				slog.Error("Failed to extract granary id", slog.Any("error", err))
				return nil, err
			}
			granary, err := p.Gds.Handler.GetGranary(ctx, gid)
			if err != nil {
				slog.Error("Failed to get granary", slog.Any("error", err))
				continue
			}
			if defaultGra == nil {
				defaultGra = granary
			}
			gstatus := v1pb.GranaryStatus(v1pb.GranaryStatus_value[granary.Status])
			if gstatus > v1pb.GranaryStatus_CHUNKING && gstatus < v1pb.GranaryStatus_ENDED {
				ragMode = cpb.RagMode_NAIVE
			}

			if err != nil {
				slog.Error("Failed to get granary", slog.Any("error", err))
				return nil, err
			}
			emid = granary.EmbeddingModelID
			docs = append(docs, granary.DocID)
		}
		if defaultGra != nil {
			wkp.Granary = defaultGra
		}
	}

	var err error
	var workFlow *LLMWorkflow

	virtUser, err := p.Gds.Handler.GetVirtUser(ctx, userId)
	if err != nil {
		return nil, err
	}

	artType := v1pb.ArtifactType(v1pb.ArtifactType_value[virtUser.VirtType])
	switch artType {
	case v1pb.ArtifactType_ARTIFACT_ASSISTANT:
		workFlow, err = wkp.buildAssistantlArtifact(ctx, virtUser.VirtID, docs, emid)
		if err != nil {
			return nil, err
		}
		break
	case v1pb.ArtifactType_ARTIFACT_WORKFLOW:
		workFlow, err = wkp.buildFlowArtifact(ctx, virtUser.VirtID, docs, emid)
		if err != nil {
			return nil, err
		}
		break
	}

	wkp.Workflow = workFlow

	return wkp, nil
}

// buildFlowArtifact loads the workflow definition identified by flowId and
// turns it into a runnable LLMWorkflow tree.
func (wkp *LLMWorkflowProcess) buildFlowArtifact(ctx context.Context, flowId int32, docs []string, emid int32) (*LLMWorkflow, error) {
	find := &apb.FindWorkflow{Id: flowId}
	flow, err := wkp.Gds.Handler.GetWorkflow(ctx, find)
	if err != nil {
		return nil, err
	}
	return wkp.buildWorkflow(ctx, flow, docs, emid)
}

// buildAssistantlArtifact wraps a single assistant in a one-node workflow
// positioned at the flow tail.
func (wkp *LLMWorkflowProcess) buildAssistantlArtifact(ctx context.Context, assistantId int32, docs []string, emid int32) (*LLMWorkflow, error) {
	assistant, err := wkp.Gds.Handler.GetLLMAssistant(ctx, assistantId)
	if err != nil {
		return nil, err
	}

	wf := &LLMWorkflow{
		Wkp:          wkp,
		InputLen:     1,
		Name:         assistant.Name,
		AssistantId:  assistantId,
		SystemPrompt: assistant.SystemPrompt,
		WorkType:     v1pb.WorkType_GATHER_WORK,
		Pos:          v1pb.FlowPos_FLOW_TAIL,
	}

	if err := wkp.setWorkflow(ctx, wf, docs, assistant, emid); err != nil {
		return nil, err
	}
	return wf, nil
}

func (wkp *LLMWorkflowProcess) setWorkflow(ctx context.Context, workflow *LLMWorkflow, docs []string, assistant *model.LlmAssistant, emid int32) error {
	var config *llmMaker.ChatClientConfig
	if len(docs) > 0 {
		workflow.Docs = docs
		ragClientConfig := wkp.NewRagConfig()
		rgb, e := wkp.SetRagAssistant(assistant.ModelID, emid, ragClientConfig)
		if e != nil {
			return e
		}
		workflow.Rgb = rgb
		config = ragClientConfig.ChatConfig
	} else {
		provider, model, err := wkp.GetModelProvider(ctx, assistant.ModelID)
		if err != nil {
			slog.Error("Failed to get assistant provider", slog.Any("error", err))
			return err
		}

		if provider == nil {
			slog.Error("No assistant provider found")
			return err
		}

		authInfo := ""
		if provider.AuthInfo != "" {
			key := []byte(viper.GetString("encryption-key"))
			authInfo, err = utils.Decrypt(provider.AuthInfo, key)
		}

		config = &llmMaker.ChatClientConfig{
			API:      provider.APIName,
			Url:      provider.AccessURL,
			AuthInfo: authInfo,
			Model:    model.Model,
		}
	}
	workflow.ChatClient = factory.NewChatClient(config)

	// workflow.ToolPool = tool.NewToolPool()
	// workflow.ToolPool.AddTool(NewTopicTool())
	return nil
}

// Init prepares the process for building workflows by resetting the
// name-to-workflow cache.
func (p *Process) Init() {
	p.WorkflowMap = map[string]*LLMWorkflow{}
}

// buildWorkflow constructs (and memoizes by name) the LLMWorkflow node for
// the given flow model, resolving its granary scope, optional assistant and
// downstream chain.
//
// Fixes vs. previous version: the node's chat client was overwritten with a
// client built from a never-assigned nil config right after setWorkflow had
// configured it; the GetGranary error and the buildWorkChain error were
// silently dropped.
func (wkp *LLMWorkflowProcess) buildWorkflow(ctx context.Context, flow *model.Workflow, gdocs []string, gemid int32) (*LLMWorkflow, error) {
	// Reuse an already-built node so shared sub-flows form a DAG, not copies.
	if w := wkp.WorkflowMap[flow.Name]; w != nil {
		return w, nil
	}

	emid := int32(0)
	workflow := &LLMWorkflow{
		Wkp:      wkp,
		InputLen: flow.InputLen,
		Name:     flow.Name,
		WorkType: v1pb.WorkType(v1pb.WorkType_value[flow.WorkType]),
	}

	if flow.Pos == "" {
		workflow.Pos = v1pb.FlowPos_FLOW_POS_UNKOWN
	} else {
		workflow.Pos = v1pb.FlowPos(v1pb.FlowPos_value[flow.Pos])
	}

	if flow.GranaryScope == "" {
		workflow.GranaryScope = v1pb.GranaryScope_UNUSE_GRANARY
	} else {
		workflow.GranaryScope = v1pb.GranaryScope(v1pb.GranaryScope_value[flow.GranaryScope])
	}

	var docs []string
	switch workflow.GranaryScope {
	case v1pb.GranaryScope_SUBJECT_GRANARY:
		// Inherit the subject-level docs and embedding model.
		docs = append(docs, gdocs...)
		emid = gemid
	case v1pb.GranaryScope_WORKFLOW_GRANARY:
		if flow.Granaries != "{}" {
			var granaries []string
			if e := json.Unmarshal([]byte(flow.Granaries), &granaries); e != nil {
				slog.Error("Failed to unmarshal docs", slog.Any("error", e))
				return nil, e
			}

			for _, g := range granaries {
				gid, err := utils.ExtractIDFromUrn(utils.GranaryUrnPrefix, g)
				if err != nil {
					slog.Error("Failed to extract granary id", slog.Any("error", err))
					return nil, err
				}

				granary, err := wkp.Gds.Handler.GetGranary(ctx, gid)
				if err != nil || granary == nil {
					// Best effort: skip granaries that cannot be loaded.
					continue
				}
				// The first granary's embedding model wins.
				if emid == 0 {
					emid = granary.EmbeddingModelID
				}
				docs = append(docs, granary.DocID)
			}
		}
	}

	// Register before recursing into the chain so cycles terminate.
	wkp.WorkflowMap[flow.Name] = workflow

	if flow.Assistant != "" {
		assistantId, err := utils.ExtractIDFromUrn(utils.AssistantUrnPrefix, flow.Assistant)
		if err != nil {
			return nil, err
		}

		workflow.AssistantId = assistantId
		assistant, err := wkp.Gds.Handler.GetLLMAssistant(ctx, assistantId)
		if err != nil {
			return nil, err
		}
		workflow.AssistantName = assistant.Name
		workflow.AssistantAvatar = assistant.AvatarURL
		workflow.SystemPrompt = assistant.SystemPrompt

		// setWorkflow assigns the chat client; do not rebuild it afterwards.
		if err := wkp.setWorkflow(ctx, workflow, docs, assistant, emid); err != nil {
			return nil, err
		}
	}

	if err := wkp.buildWorkChain(ctx, workflow, flow.ID, gdocs, gemid); err != nil {
		return nil, err
	}

	return workflow, nil
}

// buildWorkChain resolves every relation of flowId and appends the resulting
// downstream workflows to workflow.WorkChain.
func (wkp *LLMWorkflowProcess) buildWorkChain(ctx context.Context, workflow *LLMWorkflow, flowId int32, docs []string, emid int32) error {
	if workflow == nil {
		return nil
	}

	relations, err := wkp.Gds.Handler.ListWorkflowRelation(ctx, &apb.WorkflowRelation{FlowId: flowId})
	if err != nil {
		return err
	}

	for _, rel := range relations {
		find := &apb.FindWorkflow{Id: rel.NextFlowID}
		flowModel, err := wkp.Gds.Handler.GetWorkflow(ctx, find)
		if err != nil {
			return err
		}

		next, err := wkp.buildWorkflow(ctx, flowModel, docs, emid)
		if err != nil {
			return err
		}
		workflow.WorkChain = append(workflow.WorkChain, next)
	}
	return nil
}

// ProcessComplete runs the workflow once over content, recording produced
// answers as knots under srcKnot, and returns the tail node's output.
func (wkp *LLMWorkflowProcess) ProcessComplete(ctx context.Context, content string, srcKnot *model.Knot) (string, error) {
	answers := make(chan *v1pb.Exchange, 5)
	defer close(answers)

	// Persist answers in the background; closing the channel stops it.
	go wkp.RecordAnswer(ctx, srcKnot.ID, answers)

	wkp.Workflow.Inputs = append(wkp.Workflow.Inputs, content)
	return wkp.Workflow.Complete(ctx, wkp.RagMode, answers)
}

// RecordAnswer drains answerChan and persists each answer as a private assist
// knot. The knot's parent is taken from the message's AskId urn when present,
// otherwise srcAskId. It returns when ctx is cancelled or the channel closes.
//
// Fix vs. previous version: the slog.Error calls used printf-style "%v\n"
// verbs in the message, which slog never expands (structured attrs only).
func (wkp *LLMWorkflowProcess) RecordAnswer(ctx context.Context, srcAskId int32, answerChan <-chan *v1pb.Exchange) error {
	for {
		select {
		case <-ctx.Done():
			slog.Info("record answer service is done")
			return nil
		case message, ok := <-answerChan:
			if !ok {
				slog.Info("chat service is closed")
				return nil
			}
			if message == nil {
				break
			}
			slog.Debug("recv record answer req. ", slog.Any("knot", message))

			askId := srcAskId
			if message.AskId != "" {
				tmpId, err := utils.ExtractIDFromUrn(utils.KnotUrnPrefix, message.AskId)
				if err != nil {
					slog.Error("Error extracting knot ID from name", slog.Any("err", err))
					continue
				}
				askId = tmpId
			}
			_, err := wkp.createAssistKnot(ctx, wkp.UserId, askId, &v1pb.CreateKnotRequest{Content: message.Content, Visibility: cpb.Visibility_PRIVATE})
			if err != nil {
				slog.Error("Error creating ack knot", slog.Any("err", err))
				break
			}
		case <-time.After(10 * time.Second): // periodic heartbeat while idle
			slog.Info("Wait RecordAnswer")
		}
	}
}

// createAssistKnot persists an assistant-authored knot under parent, clamping
// an out-of-range visibility to PRIVATE. Returns the created record and the
// storage error, if any.
func (p *Process) createAssistKnot(ctx context.Context, assitUserId int32, parent int32, request *v1pb.CreateKnotRequest) (*model.Knot, error) {
	if request.Visibility < cpb.Visibility_PRIVATE || request.Visibility > cpb.Visibility_PUBLIC {
		request.Visibility = cpb.Visibility_PRIVATE
	}

	knot := &model.Knot{
		UID:          "A_" + shortuuid.New(),
		CreatorID:    assitUserId,
		Subject:      request.Subject,
		InteractType: request.InteractType.String(),
		Content:      request.Content,
		ParentID:     parent,
		Visibility:   request.Visibility.String(),
	}

	if err := p.Gds.Handler.CreateKnot(ctx, knot); err != nil {
		return knot, err
	}
	return knot, nil
}

func (p *Process) GetResourcePath(ctx context.Context, id int32) (string, string, error) {
	resourceFind := &apb.FindResource{
		FindBlob: true,
		Id:       id,
	}

	resource, err := p.Gds.Handler.GetResource(ctx, resourceFind)
	if err != nil {
		return "", "", status.Errorf(codes.Internal, "failed to get resource: %v", err)
	}

	if resource == nil {
		return "", "", status.Errorf(codes.NotFound, "resource not found")
	}

	if resource.Type == "LINK" {
		return resource.Reference, resource.Type, nil
	}

	if resource.StorageType == apb.ResourceStorageType_LOCAL.String() {
		resourcePath := filepath.FromSlash(resource.Reference)
		if !filepath.IsAbs(resourcePath) {
			resourcePath = filepath.Join(p.DataPath, resourcePath)
		}

		return resourcePath, resource.Type, nil
	}

	return "", "", nil
}

// ProcessChat drives an interactive chat session: it consumes inbound
// exchanges from req.InMessageChan, feeds them through the workflow, and
// streams results back on req.OutMessageChan until ctx is done or the
// inbound channel closes.
//
// A secondary context (cctx) scopes one conversational turn; a Phase_End
// message cancels in-flight work and arms a fresh turn context.
//
// Fix vs. previous version: a failed unmarshal of the granary options was
// swallowed by an empty `if e != nil {}` block; it is now logged.
func (wkp *LLMWorkflowProcess) ProcessChat(ctx context.Context, req *LLMReq, srcKnot *model.Knot) error {

	answerChan := make(chan *v1pb.Exchange, 5)
	cctx, cancel := context.WithCancel(context.Background())
	defer close(answerChan)
	defer cancel()

	for {
		select {
		case <-ctx.Done():
			slog.Info("workflow for chat service is done")
			cancel()
			return nil
		case inMsg, ok := <-req.InMessageChan:
			if !ok {
				slog.Info("recv error.")
				return nil
			}

			if inMsg.Phase == v1pb.Phase_End {
				// Abort the current turn and start a new per-turn context.
				slog.Info("Received end message:", slog.Any("id", inMsg.AskId))
				cancel()
				cctx, cancel = context.WithCancel(context.Background())
				break
			}

			if inMsg.Phase == v1pb.Phase_HELLO {
				// Begin persisting answers for this session.
				go wkp.RecordAnswer(cctx, srcKnot.ID, answerChan)
			}

			if inMsg.Content == "" && len(inMsg.Resources) == 0 {
				continue
			}
			if inMsg.Phase == v1pb.Phase_Recommend {
				if wkp.Workflow == nil {
					continue
				}
				go wkp.Workflow.RecommendTopic(cctx, req, inMsg.Content, inMsg.AskId)
				continue
			}

			msgcontent := wkp.Workflow.BuildChatMessage(v1pb.Phase_Prepare, "正在分析中...", inMsg.AskId)
			sendOutMessage(cctx, req.OutMessageChan, msgcontent)

			req.AskId = inMsg.AskId
			req.AnswerChan = answerChan
			wkp.Workflow.ClearInput()
			wkp.Workflow.Inputs = append(wkp.Workflow.Inputs, inMsg.Content)
			if len(inMsg.Resources) > 0 && wkp.Granary != nil {
				// Resolve each attached resource to a rag document and
				// ingest the batch into the session granary.
				docs := []*apb.RagResource{}
				for _, r := range inMsg.Resources {
					id, e := utils.ExtractIDFromUrn(utils.ResourceUrnPrefix, r.Urn)
					if e != nil {
						slog.Error("failed to extract id from urn", slog.Any("urn", r.Urn))
						continue
					}
					path, fileType, err := wkp.GetResourcePath(ctx, id)
					if err != nil {
						slog.Error("failed to get resource path", slog.Any("error", err))
						continue
					}
					docs = append(docs, &apb.RagResource{
						RsType: fileType,
						RsPath: path,
						RsId:   id,
						Gid:    srcKnot.ID,
					})
				}
				options := make(map[string]string)
				if wkp.Granary.Options != "{}" {
					if e := json.Unmarshal([]byte(wkp.Granary.Options), &options); e != nil {
						// Previously discarded; ingestion proceeds with defaults.
						slog.Error("failed to unmarshal granary options", slog.Any("error", e))
					}
				}
				options["drop"] = "false"
				options["summary"] = "false"
				msgcontent := wkp.Workflow.BuildChatMessage(v1pb.Phase_Prepare, "解析文档...", inMsg.AskId)
				sendOutMessage(cctx, req.OutMessageChan, msgcontent)
				wkp.Workflow.Rgb.ProcessDoc(docs, wkp.Granary.DocID, options, nil, nil)
			}

			if wkp.Workflow.Messages == nil {
				wkp.Workflow.Messages = []llm.RoleContent{}
				if req.History != nil {
					wkp.Workflow.Messages = append(wkp.Workflow.Messages, *req.History...)
				}
			}

			// A per-message rag mode overrides the subject-level default.
			ragMode := wkp.RagMode
			if inMsg.Mode != cpb.RagMode_RAG_MODE_UNSPECIFIED {
				ragMode = inMsg.Mode
			}

			go func() {
				if err := wkp.Workflow.Chat(cctx, ragMode, req); err != nil {
					sendOutMessage(cctx, req.OutMessageChan, wkp.Workflow.BuildChatMessage(v1pb.Phase_End, "Error: "+err.Error(), inMsg.AskId))
				}
			}()
		}

	}
}

// BuildChatMessage assembles an Exchange carrying the process user's identity.
func (w *LLMWorkflow) BuildChatMessage(phase v1pb.Phase, content string, askId string) *v1pb.Exchange {
	wkp := w.Wkp
	return &v1pb.Exchange{
		UserId:   wkp.UserId,
		UserName: wkp.UserName,
		Avatar:   wkp.UserAvatar,
		Phase:    phase,
		Content:  content,
		AskId:    askId,
	}
}

// ClearInput empties the inputs of this node and, recursively, of every node
// downstream of it. Slice capacity is retained for reuse.
func (w *LLMWorkflow) ClearInput() {
	w.Inputs = w.Inputs[:0]
	// Ranging over a nil WorkChain is a no-op, so no nil check is needed.
	for _, next := range w.WorkChain {
		next.ClearInput()
	}
}

// Complete runs this node and forwards its output down the work chain.
// Tail nodes (no WorkChain) return their output; intermediate nodes return
// "". Assistant output longer than one character is also published on answer.
//
// Fix vs. previous version: errors from downstream Complete calls were
// silently discarded; they now abort the run.
func (w *LLMWorkflow) Complete(ctx context.Context, ragMode cpb.RagMode, answer chan<- *v1pb.Exchange) (string, error) {
	out, err := w.doComplete(ctx, ragMode)
	if err != nil {
		return "", err
	}

	if w.AssistantId > 0 && len(out) > 1 {
		answer <- &v1pb.Exchange{AskId: "", Content: out, Phase: v1pb.Phase_End, UserId: w.AssistantId}
	}

	if w.WorkChain == nil {
		return out, nil
	}

	for _, next := range w.WorkChain {
		next.Inputs = append(next.Inputs, out)
		// A gather node waits until all of its expected inputs have arrived.
		if next.WorkType == v1pb.WorkType_GATHER_WORK && len(next.Inputs) < int(next.InputLen) {
			continue
		}
		if _, err := next.Complete(ctx, ragMode, answer); err != nil {
			return "", err
		}
	}
	return "", nil
}

// doComplete produces this node's output for the joined inputs. Non-assistant
// nodes pass their input through unchanged; assistant nodes call the chat
// client once (with an optionally rag-augmented system prompt) and append
// both sides of the turn to w.Messages.
//
// Fixes vs. previous version (aligned with doChat's prompt handling): without
// a rag builder the system message was SystemPrompt concatenated with itself;
// an empty history never received a system message at all; and the final
// return passed a stale nil err variable.
func (w *LLMWorkflow) doComplete(ctx context.Context, ragMode cpb.RagMode) (string, error) {
	content := strings.Join(w.Inputs, " ")
	if w.AssistantId <= 0 {
		// Pass-through node: no LLM involved.
		return content, nil
	}

	ragPrompt := ""
	if w.Rgb != nil {
		// The rag-builder's default prompt is only used when the assistant
		// has no system prompt of its own.
		useDefault := len(w.SystemPrompt) == 0
		slog.Debug("build rag prompt")
		p, err := w.Rgb.QueryLLMRagPrompt(content, w.Docs, useDefault, ragMode)
		if err != nil {
			return "", err
		}
		ragPrompt = p
	}

	if w.SystemPrompt != "" {
		sys := llm.RoleContent{Role: llm.RoleSys, Content: w.SystemPrompt + ragPrompt}
		if len(w.Messages) > 0 && w.Messages[0].Role == llm.RoleSys {
			w.Messages[0] = sys
		} else {
			// Covers both an empty history and one lacking a leading system
			// message.
			w.Messages = append([]llm.RoleContent{sys}, w.Messages...)
		}
	}

	w.Messages = append(w.Messages, llm.RoleContent{
		Role:    llm.RoleUser,
		Content: content,
	})

	rsp, err := w.ChatClient.Complete(ctx, &llm.CompleteReq{Msgs: w.Messages})
	if err != nil {
		return "", err
	}
	if rsp == nil {
		return "", nil
	}

	w.Messages = append(w.Messages, llm.RoleContent{
		Role:    llm.RoleAssistant,
		Content: rsp.GetContent(),
	})
	return rsp.GetContent(), nil
}

// doChat streams one chat turn for a tail workflow node: it builds the system
// prompt (rag-augmented when a rag builder exists), appends the user message
// to the history, and hands off to doChatStep for the streaming exchange.
//
// Fixes vs. previous version: when the history's first message was not a
// system message the prompt was silently dropped; slog.Info was called with a
// bare non-attr argument (producing a !BADKEY record); and doChatStep's error
// was discarded.
func (w *LLMWorkflowWrapper) doChat(ctx context.Context, ragMode cpb.RagMode, req *LLMReq) error {
	if w.AssistantId < 1 {
		return nil
	}

	prompt := w.SystemPrompt
	content := strings.Join(w.Inputs, " ")

	if w.Rgb != nil {
		// The rag default prompt is only used when the assistant brings no
		// system prompt of its own.
		useDefault := len(w.SystemPrompt) == 0
		ragPrompt, err := w.Rgb.QueryLLMRagPrompt(content, w.Docs, useDefault, ragMode)
		if err != nil {
			return err
		}
		prompt = w.SystemPrompt + ragPrompt
	}

	sys := llm.RoleContent{Role: llm.RoleSys, Content: prompt}
	if len(w.Messages) > 0 && w.Messages[0].Role == llm.RoleSys {
		w.Messages[0] = sys
	} else {
		// Covers an empty history and one without a leading system message.
		w.Messages = append([]llm.RoleContent{sys}, w.Messages...)
	}

	w.Messages = append(w.Messages, llm.RoleContent{
		Role:    llm.RoleUser,
		Content: content,
	})

	slog.Info("begin chat", slog.Any("messages", w.Messages))

	return w.doChatStep(req)
}

// RecommendTopic asks this node (and, for non-tail nodes, its downstream
// chain first) to produce topic recommendations for content.
func (w *LLMWorkflow) RecommendTopic(ctx context.Context, srcReq *LLMReq, content string, askId string) {
	if w.Pos != v1pb.FlowPos_FLOW_TAIL {
		for _, next := range w.WorkChain {
			next.RecommendTopic(ctx, srcReq, content, askId)
		}
	}

	wrapper := &LLMWorkflowWrapper{LLMWorkflow: w, Ctx: ctx}
	wrapper.doRecommendTopic(srcReq, content, askId)
}

// doRecommendTopic runs a one-shot completion with the RECOMMEND_TOPIC system
// prompt and pushes the result back as a Phase_Recommend exchange.
//
// Fix vs. previous version: the Complete error was ignored, so a failed call
// could dereference a nil reply.
func (w *LLMWorkflowWrapper) doRecommendTopic(srcReq *LLMReq, content string, askId string) {
	messages := []llm.RoleContent{
		{Role: llm.RoleSys, Content: RECOMMEND_TOPIC},
		{Role: llm.RoleUser, Content: content},
	}
	rt, err := w.ChatClient.Complete(w.Ctx, &llm.CompleteReq{Msgs: messages})
	if err != nil || rt == nil {
		slog.Error("recommend topic failed", slog.Any("err", err))
		return
	}
	slog.Info("recommend topic: ", slog.Any("rt", rt))
	if !sendOutMessage(w.Ctx, srcReq.OutMessageChan, w.BuildChatMessage(v1pb.Phase_Recommend, rt.GetContent(), askId)) {
		return
	}
}

// doChatStep launches a streaming chat request in the background and relays
// the response fragments to the caller's out-channel until the stream
// completes or the wrapper's context is cancelled.
//
// NOTE(review): the history cap below checks len(w.Messages) > 110 while the
// user-facing notice mentions 50 entries — confirm which limit is intended.
func (w *LLMWorkflowWrapper) doChatStep(srcReq *LLMReq) error {
	chatReq := &llm.ChatReq{Msgs: w.Messages}
	chatReq.RspChan = make(chan string)
	chatReq.CompleteChan = make(chan bool)
	chatReq.CompleteMessageChan = make(chan llm.Message)
	if w.ToolPool != nil {
		chatReq.Tools = w.ToolPool.GetTools()
	}

	// Run the chat in the background; the loop below consumes its channels.
	var assistSb strings.Builder
	go func() {
		select {
		case <-w.Ctx.Done():
			slog.Info("recv cancel, return now.")
			return
		default:
			sendOutMessage(w.Ctx, srcReq.OutMessageChan, w.BuildChatMessage(v1pb.Phase_Begin, "..", srcReq.AskId))

			w.ChatClient.Chat(w.Ctx, chatReq)
		}

	}()

	// waitContent stays true until the first token arrives; while waiting a
	// Phase_Begin heartbeat is re-sent every 5 seconds (see timeout case).
	waitContent := true
	for {
		select {
		case <-w.Ctx.Done():
			slog.Info("recv cancel. break now.")
			return nil

		case rsp := <-chatReq.RspChan:
			// Stream fragment: accumulate locally and forward to the client.
			waitContent = false
			assistSb.WriteString(rsp)
			sendOutMessage(w.Ctx, srcReq.OutMessageChan, w.BuildChatMessage(v1pb.Phase_Continue, rsp, srcReq.AskId))

		case msg := <-chatReq.CompleteMessageChan:
			slog.Info("recv complete message: ", slog.Any("msg", msg))
			if msg.GetToolCalls() != nil {
				w.ToolPool.Execute(msg.GetToolCalls())
			}

		case complete := <-chatReq.CompleteChan:
			if complete {
				// Stream finished: record the full assistant turn in history.
				completeMsg := assistSb.String()
				assistSb.Reset()
				w.Messages = append(w.Messages, llm.RoleContent{
					Role:    llm.RoleAssistant,
					Content: completeMsg,
				})

				if !sendOutMessage(w.Ctx, srcReq.OutMessageChan, w.BuildChatMessage(v1pb.Phase_End, "", srcReq.AskId)) {
					return nil
				}

				if len(w.Messages) > 110 {
					if !sendOutMessage(w.Ctx, srcReq.OutMessageChan, w.BuildChatMessage(v1pb.Phase_Close, "记录超过50条，请新建会话", srcReq.AskId)) {
						return nil
					}
				}
				return nil
			}
		case <-time.After(5 * time.Second):
			if waitContent {
				sendOutMessage(w.Ctx, srcReq.OutMessageChan, w.BuildChatMessage(v1pb.Phase_Begin, "..", srcReq.AskId))
			}
			// timeout handling
		}
	}
}

// sendOutMessage performs a non-blocking send of message on outMessageChan.
// It reports false when the context is already cancelled or when the channel
// cannot accept the message immediately (full or abandoned).
func sendOutMessage(ctx context.Context, outMessageChan chan<- *v1pb.Exchange, message *v1pb.Exchange) bool {
	if ctx.Err() != nil {
		slog.Warn("chat service context cancelled, stopping message send")
		return false
	}
	select {
	case outMessageChan <- message:
		return true
	default:
		slog.Warn("channel full or closed")
		return false
	}
}

// Chat executes this node for one chat turn. Tail nodes stream via doChat;
// upstream nodes complete synchronously, publish their answer, and forward
// the output down the chain (gather nodes wait for all expected inputs).
//
// Fix vs. previous version: the error from the tail node's doChat was
// discarded; it is now propagated to the caller.
func (w *LLMWorkflow) Chat(ctx context.Context, ragMode cpb.RagMode, req *LLMReq) error {
	slog.Info("start chat, work flow info:", slog.Any("workflow", w.Name))
	if w.Pos == v1pb.FlowPos_FLOW_TAIL {
		wrapper := &LLMWorkflowWrapper{
			LLMWorkflow: w,
			Ctx:         ctx,
		}
		return wrapper.doChat(ctx, ragMode, req)
	}

	out, err := w.doComplete(ctx, ragMode)
	if err != nil {
		return err
	}

	if w.AssistantId > 0 && len(out) > 1 {
		req.AnswerChan <- &v1pb.Exchange{AskId: req.AskId, Content: out, Phase: v1pb.Phase_End, UserId: w.AssistantId, Direct: v1pb.ExchageDirect_EXCHANGE_ANSWER}
	}

	for _, next := range w.WorkChain {
		select {
		case <-ctx.Done():
			slog.Info("recv cancel")
			return nil
		default:
			next.Inputs = append(next.Inputs, out)
			// A gather node waits for all of its expected inputs.
			if next.WorkType == v1pb.WorkType_GATHER_WORK && len(next.Inputs) < int(next.InputLen) {
				continue
			}
			if err := next.Chat(ctx, ragMode, req); err != nil {
				return err
			}
		}
	}

	return nil
}
