package rag

import (
	"bytes"
	"context"
	"encoding/csv"
	"encoding/json"
	"fmt"
	"html"
	"log/slog"
	"regexp"
	"strings"
	"text/template"
	"unicode/utf8"

	"github.com/pkoukk/tiktoken-go"
	"google.golang.org/protobuf/encoding/protojson"

	"quipus/internal/crawl"
	"quipus/internal/llm"
	llmMaker "quipus/internal/llm/factory"
	ragFile "quipus/internal/rag/file"
	ragStore "quipus/internal/rag/storage"
	"quipus/internal/utils"
	v1pb "quipus/proto/gen/api/v1"
	apb "quipus/proto/gen/assist"
	cpb "quipus/proto/gen/common"
)

// RagBuilder orchestrates the RAG build pipeline: it chunks documents into a
// vector store (Milvus), extracts a knowledge graph (Dgraph) and produces
// summaries via the configured LLM clients.
type RagBuilder struct {
	embedding   llm.LLM_Embedding // embedding client; may be nil (doEmbedding then returns an empty vector)
	query       llm.LLM_Chat      // chat client used for extraction/summarization prompts
	vectordb    *ragStore.MilvusService
	graphdb     *ragStore.DgraphService
	Tkm         *tiktoken.Tiktoken // tokenizer (gpt-4 encoding) used for chunk sizing
	CacheText   map[string]string  // per-doc buffer of streamed text not yet chunked
	MaxTokens   int                // token budget; overridden by ChatConfig.MaxTokens when > 0
	ChunkTokens int                // target size of one chunk, in tokens
	tokensCount int                // running token count while chunking; NOTE(review): shared across docIds — confirm builds are sequential
	CacheChunk  map[string]string  // per-doc partially assembled chunk awaiting flush
	Ctx         context.Context    // NOTE(review): context stored in a struct — its lifetime spans the whole build
	Cancel      context.CancelFunc
	Status      v1pb.GranaryStatus // current pipeline status, mirrored to update callbacks
	GranaryId   int32
	index       int32 // monotonically increasing prefix that keeps chunk ids unique
	ScoreBias   float32
	EndFlags    map[string]bool
}

// RagClientConfig aggregates the settings needed to construct a RagBuilder.
// EmbeddingConfig and ChatConfig are optional; when nil, the corresponding
// client is simply not installed.
type RagClientConfig struct {
	VectorConfig    *ragStore.VectorDBConfig        // Milvus connection settings
	GraphConfig     *ragStore.GraphDBConfig         // Dgraph connection settings
	EmbeddingConfig *llmMaker.EmbeddingClientConfig // optional embedding client settings
	ChatConfig      *llmMaker.ChatClientConfig      // optional chat client settings
	MaxTokens       int                             // default token budget (see RagBuilder.MaxTokens)
	ChunkTokens     int                             // target tokens per chunk
	ScoreBias       float32
}

// NewRagBuilder wires up the vector store, the graph store, the tokenizer and
// the optional LLM clients described by config. It fails fast on the first
// backend that cannot be reached or initialized.
func NewRagBuilder(ctx context.Context, config *RagClientConfig) (*RagBuilder, error) {
	vdb, err := ragStore.NewMilvusService(ctx, config.VectorConfig)
	if err != nil {
		return nil, err
	}
	vdb.Dim = config.VectorConfig.Dim
	vdb.Ctx = ctx

	gdb, err := ragStore.NewDgraphService(ctx, config.GraphConfig)
	if err != nil {
		return nil, err
	}
	if err := gdb.SetDocSchema(); err != nil {
		return nil, err
	}

	// Token counting always uses the gpt-4 encoding, independent of the
	// chat model actually configured.
	tokenizer, err := tiktoken.EncodingForModel("gpt-4")
	if err != nil {
		return nil, err
	}

	rb := &RagBuilder{
		vectordb:    vdb,
		graphdb:     gdb,
		Tkm:         tokenizer,
		MaxTokens:   config.MaxTokens,
		ChunkTokens: config.ChunkTokens,
		ScoreBias:   config.ScoreBias,
		CacheChunk:  map[string]string{},
		CacheText:   map[string]string{},
	}

	if config.EmbeddingConfig != nil {
		rb.SetEmbedding(config.EmbeddingConfig)
	}
	if config.ChatConfig != nil {
		rb.SetQuery(config.ChatConfig)
		// A chat-level token limit takes precedence over the builder default.
		if config.ChatConfig.MaxTokens > 0 {
			rb.MaxTokens = config.ChatConfig.MaxTokens
		}
	}
	return rb, nil
}

// SetEmbedding installs the embedding client; a positive Dim in the config
// also overrides the vector store's embedding dimensionality.
func (b *RagBuilder) SetEmbedding(c *llmMaker.EmbeddingClientConfig) {
	b.embedding = llmMaker.NewEmbeddingClient(c)
	if c.Dim > 0 {
		b.vectordb.Dim = c.Dim
	}
}

// SetQuery installs the chat client used for extraction and summarization.
func (b *RagBuilder) SetQuery(c *llmMaker.ChatClientConfig) {
	b.query = llmMaker.NewChatClient(c)
}

// GetQuery returns the configured chat client (nil when never set).
func (b *RagBuilder) GetQuery() llm.LLM_Chat {
	return b.query
}

// DeleteRag drops every vector collection belonging to docId and removes the
// document from the graph store. Errors from the individual deletions are
// not surfaced; best-effort cleanup.
func (b *RagBuilder) DeleteRag(docId string) {
	slog.Info("Begin delete rag", slog.Any("docId", docId))
	prefixes := []string{
		ragStore.EntityPrefix,
		ragStore.RelationPrefix,
		ragStore.ChunkPrefix,
		ragStore.DocSummaryPrefix,
		ragStore.GranarySummaryPrefix,
		ragStore.BuildLogPrefix,
	}
	for _, prefix := range prefixes {
		b.vectordb.DeleteCollection(docId, prefix)
	}
	b.graphdb.DeleteDoc(docId)
	slog.Info("End delete rag", slog.Any("docId", docId))
}

// ChunkDoc splits one resource into chunks: web links are crawled and fed
// through the web-page pipeline, everything else is read from its file path.
func (b *RagBuilder) ChunkDoc(docId string, doc *apb.RagResource) error {
	if doc.RsType != "LINK" {
		return b.ChunkDocToVector(docId, doc.RsId, doc.RsPath)
	}

	page, err := crawl.GetPage(doc.RsPath)
	if err != nil {
		slog.Error("crawl error", slog.Any("err", err))
		return err
	}
	if err = b.ProcessWebPage(docId, page, doc.RsId); err != nil {
		slog.Error("process web page error", slog.Any("err", err))
		b.Status = v1pb.GranaryStatus_FAILED
		return err
	}
	return nil
}

// hasDocPhaseBuilded reports whether the build log already records a
// successful run of the given phase urn for docId.
func (b *RagBuilder) hasDocPhaseBuilded(docId string, urn string) bool {
	logs, _ := b.vectordb.GetBuildLog(docId, []string{urn})
	if len(logs) == 0 {
		return false
	}
	return logs[0].Status == apb.BuildStatus_SUCCESS.String()
}

// ProcessDoc drives the full build pipeline for a set of resources:
// (re)create collections, chunk each resource, then optionally extract a
// knowledge graph (options["graph"] == "true") and summarize
// (options["summary"] == "true"). Progress is reported through the optional
// updateGranary / updateGranaryResource callbacks and recorded in the build
// log so interrupted builds can resume.
//
// Fixes vs. previous version: the summary-phase error path set FAILED and
// invoked updateGranary twice; the nil-check around the first updGry call was
// redundant (updGry already checks); the reason-truncation boilerplate is
// factored into one closure.
func (b *RagBuilder) ProcessDoc(docs []*apb.RagResource, docId string, options map[string]string,
	updateGranary func(ctx context.Context, id int32, status v1pb.GranaryStatus, summary string, tags []string) error,
	updateGranaryResource func(ctx context.Context, gid int32, rid int32, status v1pb.GranaryStatus, summary string, tags []string) error) error {

	// Unless the caller opts out with options["drop"] == "false", existing
	// data for this doc is removed and rebuilt from scratch.
	dropExist := true
	if options["drop"] == "false" {
		dropExist = false
	} else {
		b.DeleteRag(docId)
	}

	updGry := func(summary string, tags []string) {
		if updateGranary != nil {
			updateGranary(b.Ctx, b.GranaryId, b.Status, summary, tags)
		}
	}

	updGryRes := func(rsId int32, status v1pb.GranaryStatus, summary string, tags []string) {
		if updateGranaryResource != nil {
			updateGranaryResource(b.Ctx, b.GranaryId, rsId, status, summary, tags)
		}
	}

	// logFailure marks a build-log entry FAILED, truncating the reason to fit
	// the storage column.
	logFailure := func(blg *ragStore.BuildLog, cause error) {
		blg.Status = apb.BuildStatus_FAILED.String()
		blg.Reason = cause.Error()
		if len(blg.Reason) > 200 {
			blg.Reason = blg.Reason[:200]
		}
		b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
	}

	err := b.CreateCollection(docId, dropExist)
	if err != nil {
		slog.Error("create collection failed", slog.Any("err", err))
		b.Status = v1pb.GranaryStatus_FAILED
		updGry("", nil)
		return err
	}

	// Phase 1: chunking. Honors cancellation between resources.
	b.Status = v1pb.GranaryStatus_CHUNKING
	updGry("", nil)
	for _, d := range docs {
		select {
		case <-b.Ctx.Done():
			slog.Info("process is stopped")
			return nil
		default:
			slog.Info("chunk doc to vector", slog.Any("file", d.RsPath))
			updGryRes(d.RsId, v1pb.GranaryStatus_CHUNKING, "", nil)
			urn := fmt.Sprintf("%s_%d_%s", docId, d.RsId, apb.BuildPhase_CHUNK.String())
			if b.hasDocPhaseBuilded(docId, urn) {
				continue
			}

			blg := &ragStore.BuildLog{
				ChunkId: urn,
				RsId:    d.RsId,
				Phase:   apb.BuildPhase_CHUNK.String(),
				Status:  apb.BuildStatus_BUILDING.String(),
			}
			b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})

			if err = b.ChunkDoc(docId, d); err != nil {
				slog.Error("chunk doc to vector failed", slog.Any("err", err), slog.Any("file", d.RsPath))
				updGryRes(d.RsId, v1pb.GranaryStatus_FAILED, "", nil)
				logFailure(blg, err)
				b.Status = v1pb.GranaryStatus_FAILED
				updGry("", nil)
				return err
			}
			blg.Status = apb.BuildStatus_SUCCESS.String()
			b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
		}
	}

	// Phase 2 (optional): knowledge-graph extraction.
	if options["graph"] == "true" {
		b.Status = v1pb.GranaryStatus_GRAPHING
		updGry("", nil)
		for _, d := range docs {
			urn := fmt.Sprintf("%s_%d_%s", docId, d.RsId, apb.BuildPhase_KG.String())
			if b.hasDocPhaseBuilded(docId, urn) {
				continue
			}

			blg := &ragStore.BuildLog{
				ChunkId: urn,
				RsId:    d.RsId,
				Phase:   apb.BuildPhase_KG.String(),
				Status:  apb.BuildStatus_BUILDING.String(),
			}
			b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
			updGryRes(d.RsId, v1pb.GranaryStatus_GRAPHING, "", nil)

			if err = b.BuildKG(docId, d.RsId); err != nil {
				slog.Error("extract error", slog.Any("err", err))
				updGryRes(d.RsId, v1pb.GranaryStatus_FAILED, "", nil)
				logFailure(blg, err)
				b.Status = v1pb.GranaryStatus_FAILED
				updGry("", nil)
				return err
			}
			blg.Status = apb.BuildStatus_SUCCESS.String()
			b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
		}
	}

	// Phase 3 (optional): per-resource summaries, then a final granary summary.
	if options["summary"] == "true" {
		b.Status = v1pb.GranaryStatus_SUMMARIZING
		updGry("", nil)
		for _, d := range docs {
			urn := fmt.Sprintf("%s_%d_%s", docId, d.RsId, apb.BuildPhase_SUMMARY.String())
			if b.hasDocPhaseBuilded(docId, urn) {
				continue
			}
			slog.Info("summarizing doc", slog.Any("docId", docId), slog.Any("rsId", d.RsId))
			blg := &ragStore.BuildLog{
				ChunkId: urn,
				RsId:    d.RsId,
				Phase:   apb.BuildPhase_SUMMARY.String(),
				Status:  apb.BuildStatus_BUILDING.String(),
			}
			b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
			updGryRes(d.RsId, v1pb.GranaryStatus_SUMMARIZING, "", nil)

			summary, tags, err := b.SummaryDocKnowledge(docId, d.RsId)
			if err != nil {
				slog.Error("summary error", slog.Any("err", err), slog.Any("docId", docId), slog.Any("rsId", d.RsId))
				logFailure(blg, err)
				b.Status = v1pb.GranaryStatus_FAILED
				updGry("", nil)
				return err
			}
			blg.Status = apb.BuildStatus_SUCCESS.String()
			b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
			updGryRes(d.RsId, v1pb.GranaryStatus_ENDED, summary, tags)
		}

		fsummary, ftags, err := b.finalSummary(docId)
		if err != nil {
			slog.Error("final summary error", slog.Any("err", err), slog.Any("docId", docId))
			b.Status = v1pb.GranaryStatus_FAILED
			updGry("", nil)
			return err
		}
		b.Status = v1pb.GranaryStatus_ENDED
		slog.Info("build end for", slog.Any("docId", docId), slog.Any("summary", fsummary), slog.Any("tags", ftags))

		updGry(fsummary, ftags)
		return nil
	}

	slog.Info("build end for", slog.Any("docId", docId))
	b.Status = v1pb.GranaryStatus_ENDED
	updGry("", nil)
	return nil
}

// WebDocInfo is the structured summary of a crawled web page as returned by
// the LLM in ProcessWebPage; the JSON tags must match the model's output.
type WebDocInfo struct {
	Title    string `json:"title"`
	Author   string `json:"author"`
	DateTime string `json:"datetime"`
	Link     string `json:"link"`
	Content  string `json:"content"`
}

// ProcessWebPage asks the chat model to distill a crawled page into a
// WebDocInfo, stores the page metadata (title/author/date/link) as its own
// chunk, then chunks the article body.
//
// Fix: the error returned by chunkToVector was silently dropped; it is now
// propagated to the caller.
func (b *RagBuilder) ProcessWebPage(docId string, content string, rsId int32) error {
	msgs := []llm.RoleContent{
		{Role: llm.RoleSys, Content: FETCH_WEBD_CTX},
		{Role: llm.RoleUser, Content: content},
	}
	ret, e := b.query.Complete(b.Ctx, &llm.CompleteReq{Msgs: msgs})
	// NOTE(review): a non-nil error is tolerated as long as a response is
	// available — confirm the client really returns usable partial results.
	if e != nil && ret == nil {
		slog.Error("query error", slog.Any("err", e))
		return e
	}

	docInfo := &WebDocInfo{}
	if e = json.Unmarshal([]byte(ret.GetContent()), docInfo); e != nil {
		slog.Error("unmarshal error", slog.Any("err", e))
		return e
	}

	// Insert the page's base info as a dedicated chunk.
	docBase := docInfo.Title + docInfo.Author + docInfo.DateTime + docInfo.Link
	urn := fmt.Sprintf("%d-%s", b.index, utils.HashString(docBase))
	b.index += 1
	b.vectordb.InsertChunk(docId, []*ragStore.ChunkVector{{
		Urn:     urn,
		RsId:    rsId,
		Content: docBase,
	}}, b.doEmbedding)

	// Insert the article body; `last` is true because the whole page text is
	// available in one call.
	return b.chunkToVector(docId, docInfo.Content, rsId, true)
}

// CreateCollection creates every vector collection a doc build needs,
// optionally dropping pre-existing ones, and stops at the first failure.
func (b *RagBuilder) CreateCollection(docId string, dropExist bool) error {
	slog.Info("Begin create collections", slog.Any("docId", docId))

	creators := []func(string, bool) error{
		b.vectordb.CreateEntityCollection,
		b.vectordb.CreateRelationCollection,
		b.vectordb.CreateChunkCollection,
		b.vectordb.CreateDocSummaryCollection,
		b.vectordb.CreateBuildLogCollection,
	}
	for _, create := range creators {
		if err := create(docId, dropExist); err != nil {
			return err
		}
	}

	slog.Info("End create collections", slog.Any("docId", docId))
	return nil
}

// ChunkDocToVector extracts the file at filepath and streams its content
// through chunkToVector. The per-doc caches are reset first so residue from a
// previous document cannot leak into this one.
func (b *RagBuilder) ChunkDocToVector(docId string, rsId int32, filepath string) error {
	extractor, err := ragFile.NewExtractor(docId, rsId, filepath)
	if err != nil {
		return err
	}

	b.CacheChunk[docId] = ""
	b.CacheText[docId] = ""
	extractor.ExtractContent(b.chunkToVector)
	return nil
}

// upsertEntity merges entity into entityMap, which is keyed by entity name:
// kinds are unioned and the more complete description wins (concatenated when
// neither subsumes the other).
//
// Fix: the lookup previously used entity.Uid while insertion used
// entity.Name, so an existing entry was never found and duplicates of the
// same entity were never merged.
func upsertEntity(entityMap map[string]*ragStore.BaseEntity, entity *ragStore.BaseEntity) {
	if entity == nil {
		return
	}

	curEntity, ok := entityMap[entity.Name]
	if !ok {
		entityMap[entity.Name] = entity
		return
	}

	// Union the kind lists without duplicates.
	if len(curEntity.Kinds) == 0 {
		curEntity.Kinds = entity.Kinds
	} else {
		for _, kind := range entity.Kinds {
			known := false
			for _, curKind := range curEntity.Kinds {
				if kind == curKind {
					known = true
					break
				}
			}
			if !known {
				curEntity.Kinds = append(curEntity.Kinds, kind)
			}
		}
	}

	// Keep the more complete description; concatenate when neither contains
	// the other.
	if strings.Contains(curEntity.EntityDesc, entity.EntityDesc) {
		return
	}
	if strings.Contains(entity.EntityDesc, curEntity.EntityDesc) {
		curEntity.EntityDesc = entity.EntityDesc
		return
	}
	curEntity.EntityDesc += ("," + entity.EntityDesc)
}

// upsertRelation merges relation into relationMap, keyed by "src:tgt". The
// longer of the two descriptions wins; when neither subsumes the other, both
// description and keywords are concatenated.
func upsertRelation(relationMap map[string]*ragStore.BaseRelation, relation *ragStore.Relation) {
	key := relation.SrcName + ":" + relation.TgtName
	cur, exists := relationMap[key]
	if !exists {
		relationMap[key] = &relation.BaseRelation
		return
	}

	switch {
	case strings.Contains(relation.RelDesc, cur.RelDesc):
		// The incoming description subsumes the stored one: replace both
		// description and keywords.
		cur.RelDesc = relation.RelDesc
		cur.RelKeyWords = relation.RelKeyWords
	case strings.Contains(cur.RelDesc, relation.RelDesc):
		// Stored description already covers the incoming one.
		return
	default:
		cur.RelDesc += relation.RelDesc
		cur.RelKeyWords += relation.RelKeyWords
	}
	relationMap[key] = cur
}

// recordRe captures the parenthesized payload of one extraction record; it is
// compiled once at package init instead of once per record.
var recordRe = regexp.MustCompile(`\((.*)\)`)

// buildKgChunk extracts entities and relations from a single chunk via the
// LLM and upserts them into the graph and vector stores, tracking progress in
// the build log so a resumed build skips completed chunks.
//
// Fixes vs. previous version: (1) when relations were found the function
// returned before the SUCCESS build-log entry was written, leaving the chunk
// stuck in BUILDING; (2) the UpsertRelationToGraph error was discarded;
// (3) extractRelation received ckv.Content where its chunkId parameter was
// expected (the entity call correctly passes ckv.Urn); (4) the record regexp
// was recompiled inside the loop.
func (b *RagBuilder) buildKgChunk(docId string, rsId int32, ckv *ragStore.ChunkVector) error {
	urn := ckv.Urn + "_" + apb.BuildPhase_KG.String()
	if b.hasDocPhaseBuilded(docId, urn) {
		return nil
	}

	blg := &ragStore.BuildLog{
		ChunkId: urn,
		RsId:    rsId,
		Phase:   apb.BuildPhase_KG.String(),
		Status:  apb.BuildStatus_BUILDING.String(),
	}
	b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})

	// markDone finalizes the build-log entry, truncating a failure reason to
	// fit the storage column.
	markDone := func(status apb.BuildStatus, cause error) {
		blg.Status = status.String()
		if cause != nil {
			blg.Reason = cause.Error()
			if len(blg.Reason) > 200 {
				blg.Reason = blg.Reason[:200]
			}
		}
		b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
	}

	prompt, err := BuildExtractPrompt()
	if err != nil {
		return err
	}

	knowlegde, err := b.getRagKnowledge(prompt, ckv.Content)
	if err != nil {
		markDone(apb.BuildStatus_FAILED, err)
		return err
	}

	slog.Debug("llm extract knowledge ", slog.Any("knowledge", knowlegde))

	marks := []string{DEFAULT_RECORD_DELIMITER, DEFAULT_COMPLETION_DELIMITER}
	records := splitStrByMarker(knowlegde, marks)
	if len(records) == 0 {
		// Nothing extracted: still mark the phase complete so it is not
		// re-attempted forever in BUILDING state.
		markDone(apb.BuildStatus_SUCCESS, nil)
		return nil
	}

	slog.Info("extract chunk", slog.Any("chunkId", ckv.Urn), slog.Any("records", records))

	entityMap := make(map[string]*ragStore.BaseEntity)
	relationMap := make(map[string]*ragStore.BaseRelation)

	for _, record := range records {
		matches := recordRe.FindStringSubmatch(record)
		if len(matches) < 2 {
			continue
		}
		record = matches[1]
		attrs := splitStrByMarker(record, []string{DEFAULT_TUPLE_DELIMITER})

		entity, _ := extractEntity(attrs, ckv.Urn, docId)
		if entity != nil {
			// Drop hallucinated entities that never appear in the chunk text.
			if strings.Contains(ckv.Content, entity.Name) {
				upsertEntity(entityMap, entity)
			}
			continue
		}

		relation, _ := extractRelation(attrs, ckv.Urn, docId)
		if relation != nil && strings.Contains(ckv.Content, relation.SrcName) {
			upsertRelation(relationMap, relation)
		}
	}

	if len(entityMap) != 0 {
		if err = b.UpsertEntityToGraph(entityMap); err != nil {
			return err
		}
		b.UpsertEntityToVector(docId, entityMap)
	}

	if len(relationMap) != 0 {
		if err = b.UpsertRelationToGraph(relationMap); err != nil {
			return err
		}
		b.UpsertRelationToVector(docId, relationMap)
	}

	markDone(apb.BuildStatus_SUCCESS, nil)
	return nil
}

// chunkToVector feeds streamed content through chunkContent and inserts any
// completed chunks into the chunk collection, skipping chunks already present
// (resumed builds).
//
// Fix: the initial `chunkId := utils.HashString(chunk)` was a dead store,
// immediately overwritten by the indexed id on the next line.
func (b *RagBuilder) chunkToVector(docId string, content string, rsId int32, last bool) error {
	slog.Info("extract knowledge", slog.Any("content", content))
	chunks := b.chunkContent(docId, content, last)
	if len(chunks) == 0 {
		return nil
	}

	var chunkVectors []*ragStore.ChunkVector
	for _, chunk := range chunks {
		if len(chunk) == 0 {
			continue
		}

		// Prefix the content hash with a monotonically increasing index so
		// ids stay unique even for repeated text; cap the id length at 70.
		chunkId := fmt.Sprintf("%d_%s", b.index, utils.HashString(chunk))
		b.index += 1
		if len(chunkId) > 70 {
			chunkId = chunkId[:70]
		}

		hascks, err := b.vectordb.GetChunk(docId, []string{chunkId})
		if err != nil {
			return err
		}
		if len(hascks) > 0 {
			continue
		}

		chunkVectors = append(chunkVectors, &ragStore.ChunkVector{Urn: chunkId, RsId: rsId, Content: chunk})
	}

	if len(chunkVectors) > 0 {
		b.vectordb.InsertChunk(docId, chunkVectors, b.doEmbedding)
	}

	return nil
}

// BuildKG runs knowledge-graph extraction over every stored chunk of the
// given resource; per-chunk outcomes are recorded in the build log by
// buildKgChunk. NOTE(review): the iterator's result is discarded here —
// confirm QueryChunkIterator surfaces per-chunk errors some other way.
func (b *RagBuilder) BuildKG(docId string, rsId int32) error {
	b.vectordb.QueryChunkIterator(docId, rsId, b.buildKgChunk)
	return nil
}

// 更新实体
// UpsertEntityToGraph writes each merged entity into the graph store. An
// entity with no existing node is inserted directly; otherwise it is merged
// with the stored nodes, superseded duplicates are deleted, and the node is
// re-written only when the merge changed it. Edges are refreshed either way.
//
// Fixes: the info log used the key "relation" for an entity value; the
// `entityMap[key] = entity` writes were no-ops (entity is already the value
// stored under key) and have been removed.
func (b *RagBuilder) UpsertEntityToGraph(entityMap map[string]*ragStore.BaseEntity) error {
	for _, entity := range entityMap {
		slog.Info("upsert entity to graph:", slog.Any("entity", entity))

		entites, _ := b.graphdb.GetNodeByName(entity.Name, entity.DocId)
		if entites == nil || len(*entites) == 0 {
			// Brand-new node: insert and wire up its edges.
			if err := b.graphdb.UpsertNode(entity); err != nil {
				slog.Error("upsert graph node failed", slog.Any("error", err))
				return err
			}
			b.updateEntityRelation(entity)
			continue
		}

		// Merge with existing nodes first, deleting superseded duplicates.
		uids, update := mergeEntity(entites, entity)
		for _, uid := range uids {
			b.DeleteNode(uid)
		}
		if update {
			if err := b.graphdb.UpsertNode(entity); err != nil {
				slog.Error("upsert graph node failed", slog.Any("error", err))
			}
		}

		b.updateEntityRelation(entity)
	}

	return nil
}

// DeleteNode removes an entity node and detaches the reverse Owe edges that
// pointed at it.
//
// Fix: guard against a nil pointer from GetReverseOweRelationByUid before
// dereferencing — ranging over a nil *slice would panic.
func (b *RagBuilder) DeleteNode(uid string) error {
	slog.Info("delete node:", slog.Any("uid", uid))
	b.graphdb.DeleteNode(uid)

	relations, err := b.graphdb.GetReverseOweRelationByUid(uid)
	if err != nil {
		return err
	}

	var ids []string
	if relations != nil {
		for _, relation := range *relations {
			ids = append(ids, relation.Uid)
		}
	}
	if err = b.graphdb.BatchDeleteOweEdge(ids, uid); err != nil {
		return err
	}
	return nil
}

// DeleteRelation removes a relation node and detaches the Own edges of the
// entities that referenced it.
//
// Fix: guard against a nil pointer from GetRelationEntityByUid before
// dereferencing — ranging over a nil *slice would panic.
func (b *RagBuilder) DeleteRelation(uid string) error {
	slog.Info("delete relation:", slog.Any("uid", uid))

	b.graphdb.DeleteNode(uid)

	relations, err := b.graphdb.GetRelationEntityByUid(uid)
	if err != nil {
		return err
	}

	var ids []string
	if relations != nil {
		for _, relation := range *relations {
			for _, o := range relation.Owns {
				ids = append(ids, o.Uid)
			}
		}
	}
	if err = b.graphdb.BatchDeleteOwnEdge(ids, uid); err != nil {
		return err
	}
	return nil
}

// UpsertRelationToGraph writes each merged relation into the graph store:
// brand-new relations are inserted directly; otherwise descriptions are
// merged with the stored relations, superseded duplicates are deleted, and
// the relation is re-written only when the merge changed it. Edges are
// refreshed in both paths. Lookup failures skip the relation rather than
// aborting the batch.
func (b *RagBuilder) UpsertRelationToGraph(relationMap map[string]*ragStore.BaseRelation) error {
	for _, relation := range relationMap {
		slog.Info("upsert relation to graph:", slog.Any("relation", relation))

		existing, err := b.graphdb.GetRelation(relation.SrcName, relation.TgtName)
		if err != nil {
			slog.Error("get relation failed", slog.Any("error", err))
			continue
		}

		if existing == nil || len(*existing) == 0 {
			if err := b.graphdb.UpsertRelation(relation); err != nil {
				slog.Error("upsert graph node failed", slog.Any("error", err))
			}
			b.updateRelation(relation)
			continue
		}

		obsolete, changed := mergeRelaition(existing, relation)
		for _, uid := range obsolete {
			b.DeleteRelation(uid)
		}
		if changed {
			if err := b.graphdb.UpsertRelation(relation); err != nil {
				slog.Error("upsert graph node failed", slog.Any("error", err))
			}
		}

		b.updateRelation(relation)
	}

	return nil
}

// mergeRelaition folds the stored relations into relation, which adopts the
// first stored Uid. It returns the Uids of now-superseded relations and
// whether the merged relation must be re-written to the store.
func mergeRelaition(relations *[]ragStore.Relation, relation *ragStore.BaseRelation) ([]string, bool) {
	superseded := []string{}
	rewrite := false

	stored := *relations
	if len(stored) > 0 {
		relation.Uid = stored[0].Uid
	}

	for _, r := range stored {
		if r.RelDesc == relation.RelDesc {
			// Identical description: nothing to merge, keep as-is.
			continue
		}
		switch {
		case strings.Contains(r.RelDesc, relation.RelDesc):
			// Stored description subsumes ours: adopt it.
			relation.RelDesc = r.RelDesc
		case strings.Contains(relation.RelDesc, r.RelDesc):
			// Ours subsumes the stored one: store must be updated.
			rewrite = true
		default:
			relation.RelDesc = relation.RelDesc + r.RelDesc
			rewrite = true
		}
		if relation.Uid != r.Uid {
			superseded = append(superseded, r.Uid)
		}
	}
	return superseded, rewrite
}

// 更新实体的关系链
func (b *RagBuilder) updateEntityRelation(entity *ragStore.BaseEntity) {
	owns, err := b.graphdb.GetOwnRelation(entity.Name)
	if err != nil {
		slog.Error("get own relation failed", slog.Any("error", err))
	} else {
		for _, own := range *owns {
			slog.Info("set own edge:", slog.Any(entity.Uid, own.Uid))
			b.graphdb.SetOwnEdge(entity.Uid, own.Uid)
		}
	}

	owes, err := b.graphdb.GetOweRelation(entity.Name)
	if err != nil {
		slog.Error("get own relation failed", slog.Any("error", err))
	} else {
		for _, owe := range *owes {
			slog.Info("set owe edge:", slog.Any(entity.Uid, owe.Uid))
			b.graphdb.SetOweEdge(entity.Uid, owe.Uid)
		}
	}
}

// updateRelation wires a relation node to its endpoint entities: Own edges
// from every node matching the source name, Owe edges from every node
// matching the target name.
//
// Fixes: both error messages said "get own relation failed" although these
// calls fetch nodes; nil guards added before dereferencing the slice pointers.
func (b *RagBuilder) updateRelation(relation *ragStore.BaseRelation) {
	owns, err := b.graphdb.GetNodeByName(relation.SrcName, relation.DocId)
	if err != nil {
		slog.Error("get src node failed", slog.Any("error", err))
	} else if owns != nil {
		for _, e := range *owns {
			slog.Info("set own edge:", slog.Any(e.Uid, relation.Uid))
			b.graphdb.SetOwnEdge(e.Uid, relation.Uid)
		}
	}

	owes, err := b.graphdb.GetNodeByName(relation.TgtName, relation.DocId)
	if err != nil {
		slog.Error("get tgt node failed", slog.Any("error", err))
	} else if owes != nil {
		for _, e := range *owes {
			slog.Info("set owe edge:", slog.Any(e.Uid, relation.Uid))
			b.graphdb.SetOweEdge(e.Uid, relation.Uid)
		}
	}
}

// 合并实体
func mergeEntity(entites *[]ragStore.Entity, entity *ragStore.BaseEntity) ([]string, bool) {
	uids := []string{}
	update := false
	if len(*entites) > 0 {
		entity.Uid = (*entites)[0].Uid
	}

	for _, e := range *entites {
		if e.EntityDesc == entity.EntityDesc {
			continue
		} else if strings.Contains(e.EntityDesc, entity.EntityDesc) {
			entity.EntityDesc = e.EntityDesc
		} else if strings.Contains(entity.EntityDesc, e.EntityDesc) {
			update = true
			//do nothing
		} else {
			entity.EntityDesc = e.EntityDesc + entity.EntityDesc
			update = true
		}
		if e.Uid != entity.Uid {
			uids = append(uids, e.Uid)
		}
	}

	return uids, update
}

// UpsertEntityToVector writes every merged entity into the doc's entity
// vector collection, embedding its description and keywords.
func (b *RagBuilder) UpsertEntityToVector(docId string, entityMap map[string]*ragStore.BaseEntity) error {
	vectors := make([]*ragStore.DocVector, 0, len(entityMap))
	for _, entity := range entityMap {
		vectors = append(vectors, &ragStore.DocVector{
			Urn:     entity.Uid,
			SrcName: entity.Name,
			Content: entity.EntityDesc + "," + entity.EntityKeyWords,
		})
	}

	if _, err := b.vectordb.InsertEntity(docId, vectors, b.doEmbedding); err != nil {
		slog.Error("upsert vector failed", slog.Any("error", err))
		return err
	}
	return nil
}

// UpsertRelationToVector writes every merged relation into the doc's relation
// vector collection, embedding its description and keywords.
func (b *RagBuilder) UpsertRelationToVector(docId string, relationMap map[string]*ragStore.BaseRelation) error {
	vectors := make([]*ragStore.DocVector, 0, len(relationMap))
	for _, relation := range relationMap {
		slog.Info("upsert relation to vector:", slog.Any("relation", relation))

		vectors = append(vectors, &ragStore.DocVector{
			Urn:     relation.Uid,
			SrcName: relation.SrcName,
			Content: relation.RelDesc + "," + relation.RelKeyWords,
			TgtName: relation.TgtName,
		})
	}

	if _, err := b.vectordb.InsertRelation(docId, vectors, b.doEmbedding); err != nil {
		slog.Error("upsert vector failed", slog.Any("error", err))
		return err
	}
	return nil
}

// ctrlCharRe matches C0/C1 control characters (and DEL) that must be stripped
// from extracted values.
var ctrlCharRe = regexp.MustCompile(`[\x00-\x1f\x7f-\x9f]`)

// cleanStr normalizes one extracted field: HTML-unescape, trim surrounding
// whitespace, remove control characters, then strip one layer of double and
// single quotes.
//
// Fix: the regexp is now compiled once at package init instead of on every
// call (cleanStr runs once per extracted record field).
func cleanStr(input string) string {
	result := html.UnescapeString(strings.TrimSpace(input))
	cleaned := ctrlCharRe.ReplaceAllString(result, "")
	cleaned = strings.Trim(cleaned, `"`)
	cleaned = strings.Trim(cleaned, `'`)
	return cleaned
}

// extractEntity parses one record (already split into fields) into a
// BaseEntity. It returns (nil, nil) when the record is not an entity record
// or the entity name is empty; the record layout is
// ["entity", name, type, description].
//
// Fix: the emptiness check used strings.Trim(name, ""), which trims nothing
// (empty cutset); cleanStr already trims whitespace, so the name is compared
// against "" directly.
func extractEntity(records []string, chunkId string, docId string) (*ragStore.BaseEntity, error) {
	if len(records) < 4 || records[0] != `"entity"` {
		return nil, nil
	}

	entityName := cleanStr(strings.ToUpper(records[1]))
	if entityName == "" {
		return nil, nil
	}

	entityType := cleanStr(strings.ToUpper(records[2]))
	entityDesc := cleanStr(strings.ToUpper(records[3]))
	entity := &ragStore.BaseEntity{
		Uid:        "_:" + entityName,
		Name:       entityName,
		DType:      []string{"Entity"},
		DocId:      docId,
		EntityDesc: entityDesc,
		ChunkId:    chunkId,
	}

	entity.Kinds = append(entity.Kinds, entityType)
	return entity, nil
}

// extractRelation parses one record into a Relation. It returns (nil, nil)
// when the record is not a relationship record or the source name is empty;
// the record layout is ["relationship", src, tgt, description, keywords?].
//
// Fixes: strings.Trim(srcName, "") was a no-op (empty cutset) — the name is
// compared against "" directly; the redundant `else { relKeyword = "" }`
// branch is gone (the variable already defaults to "").
func extractRelation(records []string, chunkId string, docId string) (*ragStore.Relation, error) {
	if len(records) < 4 || records[0] != `"relationship"` {
		return nil, nil
	}

	srcName := cleanStr(strings.ToUpper(records[1]))
	if srcName == "" {
		return nil, nil
	}

	tgtName := cleanStr(strings.ToUpper(records[2]))
	relDesc := cleanStr(strings.ToUpper(records[3]))

	// Optional fifth field: relationship keywords.
	relKeyword := ""
	if len(records) > 4 {
		relKeyword = cleanStr(strings.ToUpper(records[4]))
	}

	// The target entity is attached as an Owe so the edge can be created later.
	tgtEntity := ragStore.Entity{
		BaseEntity: ragStore.BaseEntity{Name: tgtName,
			ChunkId: chunkId},
	}

	name := srcName + "_" + tgtName
	relation := &ragStore.Relation{
		BaseRelation: ragStore.BaseRelation{
			Uid:         "_:" + name,
			Name:        name,
			RelDesc:     relDesc,
			SrcName:     srcName,
			TgtName:     tgtName,
			DType:       []string{"Relation"},
			DocId:       docId,
			RelKeyWords: relKeyword,
		},
		Owes: []ragStore.Entity{tgtEntity},
	}

	return relation, nil
}

// punctuations lists the runes (ASCII and CJK) at which a chunk may be cut.
var punctuations = map[rune]bool{
	'。': true, '！': true, '？': true, '；': true, '，': true, '：': true,
	'.': true, '!': true, '?': true, ';': true, ',': true, ':': true,
	'\n': true, '、': true, ' ': true,
}

// findChunkPos returns the byte offset just past the first punctuation rune
// found in text[startPos : startPos+chunkLen], or -1 when the text is still
// too short to split or no punctuation lies inside the window.
//
// Fix: the previous implementation tested rune(text[i]) — a single byte — so
// the multibyte CJK punctuation listed above could never match; runes are now
// decoded with utf8.DecodeRuneInString. minLen is currently unused and kept
// only for interface compatibility.
func findChunkPos(text string, startPos int, chunkLen int, minLen int) int {
	// The whole text fits inside one chunk: no split point needed yet.
	if startPos == 0 && len(text)-startPos < chunkLen {
		return -1
	}

	// Clamp the search window to the text (mirrors the original's len-1 cap).
	endIndex := startPos + chunkLen
	if endIndex >= len(text) {
		endIndex = len(text) - 1
	}

	// Scan forward for the first punctuation rune inside the window.
	for i := startPos; i < endIndex; {
		r, size := utf8.DecodeRuneInString(text[i:])
		if size == 0 {
			break
		}
		if punctuations[r] {
			return i + size // cut just after the punctuation mark
		}
		i += size
	}
	return -1
}

// chunkContent accumulates streamed content for docId and returns any chunks
// that have reached roughly ChunkTokens tokens. Text that cannot yet form a
// full chunk is parked in CacheText (raw tail) and CacheChunk (assembled
// partial chunk) between calls; `last` flushes everything that remains.
//
// NOTE(review): b.tokensCount is one counter shared by all docIds — if chunk
// streams for different docs were interleaved they would corrupt each other's
// counts. Confirm builds are strictly sequential per RagBuilder.
func (b *RagBuilder) chunkContent(docId string, content string, last bool) []string {
	input := b.CacheText[docId] + content
	// Small final fragment: emit it as a single chunk.
	if len(input) <= b.ChunkTokens && last {
		b.CacheText[docId] = ""
		return []string{input}
	}

	// Not enough buffered bytes yet to be worth splitting; keep accumulating.
	if len(input) <= b.ChunkTokens*3 {
		b.CacheText[docId] = input
		return []string{}
	}

	chunks := []string{}
	// First cut position: near one chunk length in, unless a partial chunk is
	// already cached, in which case start scanning from the beginning.
	pos := b.ChunkTokens - 10
	if len(b.CacheChunk[docId]) > 0 {
		pos = 0
	}

	start := 0
	for pos != -1 && start < len(input) {
		chunk := ""
		if pos > 0 {
			chunk = input[start:pos]

			b.tokensCount += len(b.Tkm.Encode(chunk, nil, nil))
			b.CacheChunk[docId] += chunk
		}

		// Flush the assembled chunk once it reaches the token budget.
		if b.tokensCount >= b.ChunkTokens {
			slog.Info("rag builder chunk content", slog.Any("docId", docId), slog.Any("chunk", b.CacheChunk[docId]))
			chunks = append(chunks, b.CacheChunk[docId])
			b.CacheChunk[docId] = ""
			b.tokensCount = 0
		}

		start = pos
		pos = findChunkPos(input, start, b.ChunkTokens, 10)
	}

	// chunkRest handles whatever follows the last punctuation cut: oversized
	// remainders are hard-split on token boundaries; small ones are either
	// emitted (when last) or parked in CacheText for the next call.
	chunkRest := func(last bool) {
		rest := input[start:]
		tks := b.Tkm.Encode(rest, nil, nil)
		tkc := len(tks)
		if tkc >= b.ChunkTokens {
			startIndex := 0
			endIndex := b.ChunkTokens
			for startIndex < tkc {
				cks := b.Tkm.Decode(tks[startIndex:endIndex])
				chunks = append(chunks, cks)
				startIndex = endIndex
				endIndex += b.ChunkTokens
				if endIndex > tkc {
					endIndex = tkc
				}
			}
			b.CacheText[docId] = ""
		} else {
			if last {
				chunks = append(chunks, rest)
				b.CacheText[docId] = ""
			} else {
				b.CacheText[docId] = rest
			}
		}
	}

	if len(input[start:]) > 0 {
		if last {
			// Final call: flush the partial chunk cache, then the tail.
			b.CacheText[docId] = ""

			b.tokensCount = 0
			if b.CacheChunk[docId] != "" {
				chunks = append(chunks, b.CacheChunk[docId])
				b.CacheChunk[docId] = ""
			}

			if input[start:] != "" {
				chunkRest(last)
			}
		} else {
			chunkRest(false)
		}
	} else {
		b.CacheText[docId] = ""
	}

	return chunks
}

// getRagKnowledge runs the entity-extraction conversation: one initial
// extraction pass followed by up to MAX_ITERATIONS "continue" rounds. After
// each round (except the last) the LLM is asked whether entities remain; any
// answer other than "yes" stops the loop. The concatenated extraction output
// is returned.
//
// NOTE(review): uses context.Background() rather than b.Ctx, so cancelling
// the build does not abort in-flight extraction calls — confirm intended.
func (b *RagBuilder) getRagKnowledge(prompt string, content string) (string, error) {
	ctx := context.Background()
	msgs := []llm.RoleContent{{Role: llm.RoleSys, Content: prompt}}
	msgs = append(msgs, llm.RoleContent{Role: llm.RoleUser, Content: content})
	ret, err := b.query.Complete(ctx, &llm.CompleteReq{Msgs: msgs})

	if err != nil || ret == nil {
		return "", err
	}
	extraContent := ret.GetContent()
	msgs = append(msgs, llm.RoleContent{Role: llm.RoleAssistant, Content: extraContent})
	for i := 0; i < MAX_ITERATIONS; i++ {
		msgs = append(msgs, llm.RoleContent{Role: llm.RoleUser, Content: entiti_continue_extraction})
		k, err := b.query.Complete(ctx, &llm.CompleteReq{Msgs: msgs})
		if err != nil || k == nil {
			break
		}
		extraContent += k.GetContent()
		// Last round: skip the loop check, its answer would go unused.
		if i == MAX_ITERATIONS-1 {
			break
		}

		msgs = append(msgs, llm.RoleContent{Role: llm.RoleAssistant, Content: k.GetContent()})
		msgs = append(msgs, llm.RoleContent{Role: llm.RoleUser, Content: entiti_if_loop_extraction})
		loopRet, err := b.query.Complete(ctx, &llm.CompleteReq{Msgs: msgs})
		if err != nil || loopRet == nil {
			break
		}
		// Normalize the yes/no answer: strip spaces and quotes, lowercase.
		loopCheck := strings.Trim(loopRet.GetContent(), " ")
		loopCheck = strings.Trim(loopCheck, `'`)
		loopCheck = strings.Trim(loopCheck, `"`)
		loopCheck = strings.ToLower(loopCheck)
		if loopCheck != "yes" {
			break
		}
	}
	return extraContent, nil
}

func BuildExtractPrompt() (string, error) {
	type Prompt struct {
		Tuple_delimiter      string
		Record_delimiter     string
		Completion_delimiter string
		Entity_types         string
	}

	Prompts := Prompt{
		Tuple_delimiter:      DEFAULT_TUPLE_DELIMITER,
		Record_delimiter:     DEFAULT_RECORD_DELIMITER,
		Completion_delimiter: DEFAULT_COMPLETION_DELIMITER,
		Entity_types:         DEFAULT_ENTITY_TYPES,
	}

	tmpl, err := template.New("prompt").Parse(ENTITY_EXTRACTION2)
	if err != nil {
		return "", err
	}

	buf := bytes.Buffer{}
	err = tmpl.Execute(&buf, Prompts)
	if err != nil {
		return "", err
	}
	return buf.String(), nil
}

// splitStrByMarker splits content on any of the given literal markers.
// Markers are regex-escaped so special characters match verbatim.
func splitStrByMarker(content string, markers []string) []string {
	escaped := make([]string, 0, len(markers))
	for _, marker := range markers {
		escaped = append(escaped, regexp.QuoteMeta(marker))
	}

	// Alternation over all escaped markers, e.g. `a|b|c`.
	re := regexp.MustCompile(strings.Join(escaped, "|"))
	return re.Split(content, -1)
}

// f64Tof32 narrows a float64 slice to float32, element by element.
func f64Tof32(slice []float64) []float32 {
	out := make([]float32, len(slice))
	for i := range slice {
		out[i] = float32(slice[i])
	}
	return out
}

// doEmbedding embeds content with the configured embedding client. It returns
// an empty slice when no client is configured and nil when the embedding call
// fails.
func (b *RagBuilder) doEmbedding(ctx context.Context, content string) []float32 {
	if b.embedding == nil {
		return []float32{}
	}

	resp, err := b.embedding.Embedding(ctx, &llm.EmbeddingReq{Prompt: content})
	if err != nil {
		slog.Error("embedding error:", slog.Any("err", err))
		return nil
	}
	return f64Tof32(resp)
}

// SummaryDocKnowledge summarizes every stored chunk of the resource, then
// reduces the per-chunk summaries into a single document summary and tags.
func (b *RagBuilder) SummaryDocKnowledge(docId string, rsId int32) (string, []string, error) {
	b.vectordb.QueryChunkIterator(docId, rsId, b.summarizeChunk)
	return b.docSummary(docId, rsId)
}

// BuildSummaryPrompt renders prompTmpl, substituting {{.Article}} with input.
func BuildSummaryPrompt(input string, prompTmpl string) (string, error) {
	type promptVars struct {
		Article string
	}

	tmpl, err := template.New("prompt").Parse(prompTmpl)
	if err != nil {
		return "", err
	}

	var rendered bytes.Buffer
	if err := tmpl.Execute(&rendered, promptVars{Article: input}); err != nil {
		return "", err
	}
	return rendered.String(), nil
}

// UnmarshalLLMResponse queries the LLM with prompt and decodes the first
// JSON object found in the reply via decode, retrying on any failure.
//
// The reply is trimmed to the span between the first '{' and the last '}'
// before decoding, since LLMs often wrap JSON in prose or code fences.
//
// Fixes vs. original: the extracted span is validated (begin <= end), so a
// reply whose last '}' precedes its first '{' no longer panics with an
// out-of-range slice; the last underlying error is retained and wrapped
// into the final error instead of being silently discarded.
func (b *RagBuilder) UnmarshalLLMResponse(ctx context.Context, prompt string, decode func(msg string) error) error {
	const maxRetries = 10

	var lastErr error
	for retries := 0; retries < maxRetries; retries++ {
		knowledge, err := b.QueryLLMBasic(ctx, prompt)
		if err != nil {
			lastErr = err
			continue
		}

		slog.Info("llm response ", slog.Any("info", knowledge))

		// Extract the outermost JSON object from the raw response.
		begin := strings.Index(knowledge, "{")
		end := strings.LastIndex(knowledge, "}")
		if begin == -1 || end == -1 || begin > end {
			lastErr = fmt.Errorf("no JSON object found in LLM response")
			continue
		}

		if err := decode(knowledge[begin : end+1]); err != nil {
			lastErr = err
			continue
		}

		return nil
	}

	return fmt.Errorf("failed to unmarshal LLM response after %d retries: %w", maxRetries, lastErr)
}

// summarizeChunk produces an LLM summary for a single document chunk and
// stores it in the vector DB, tracking progress in the build log. Chunks
// already marked built in the build log, or that already have a stored
// summary, are skipped.
//
// Fix vs. original: the second `if err != nil` block (after
// InsertDocSummary) was dead code — err was provably nil at that point
// because InsertDocSummary's result was never assigned — and is removed.
func (b *RagBuilder) summarizeChunk(docId string, rsId int32, ckv *ragStore.ChunkVector) error {
	urn := ckv.Urn + "_" + apb.BuildPhase_SUMMARY.String()
	if b.hasDocPhaseBuilded(docId, urn) {
		return nil
	}

	// Skip if a summary row for this chunk already exists.
	cks, err := b.vectordb.GetDocSummary(docId, rsId, []string{ckv.Urn})
	if err != nil {
		return err
	}
	if len(cks) > 0 {
		return nil
	}

	// Mark the phase as in progress before calling the LLM.
	blg := &ragStore.BuildLog{
		ChunkId: urn,
		RsId:    rsId,
		Phase:   apb.BuildPhase_SUMMARY.String(),
		Status:  apb.BuildStatus_BUILDING.String(),
	}
	b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})

	prompt, err := BuildSummaryPrompt(ckv.Content, SUMMARIZE_DOC_DESCRIPTIONS)
	if err != nil {
		return err
	}

	ctx := context.Background()
	granarySummary := &cpb.TopicDef{}
	decode := func(msg string) error {
		return protojson.Unmarshal([]byte(msg), granarySummary)
	}

	if err := b.UnmarshalLLMResponse(ctx, prompt, decode); err != nil {
		// Record the failure; reason is truncated to fit the log column.
		blg.Status = apb.BuildStatus_FAILED.String()
		blg.Reason = err.Error()
		if len(blg.Reason) > 200 {
			blg.Reason = blg.Reason[:200]
		}
		b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
		return err
	}

	chunkGranary := &ragStore.ChunkSummary{
		ChunkId: ckv.Urn,
		RsId:    ckv.RsId,
		Summary: granarySummary,
	}
	// NOTE(review): any error from InsertDocSummary is not surfaced here
	// (matching the effective behavior of the original and of docSummary) —
	// confirm whether it returns an error that should fail the build log.
	b.vectordb.InsertDocSummary(docId, []*ragStore.ChunkSummary{chunkGranary}, b.doEmbedding)

	blg.Status = apb.BuildStatus_SUCCESS.String()
	b.vectordb.UpsertBuildLog(docId, []*ragStore.BuildLog{blg})
	return nil
}

// SummaryToCSV renders chunk summaries as a CSV document with a
// "Title","Content" header: one row per primary topic, followed by one
// row for each of its sub-units. Write failures panic, matching the
// original behavior (in practice unreachable with a strings.Builder sink).
func SummaryToCSV(records []*ragStore.ChunkSummary) string {
	var sb strings.Builder
	w := csv.NewWriter(&sb)

	writeRow := func(title, content string) {
		if err := w.Write([]string{title, content}); err != nil {
			panic(err)
		}
	}

	// Header row.
	writeRow("Title", "Content")

	for _, rec := range records {
		writeRow(rec.Summary.Primary.Title, rec.Summary.Primary.Summary)
		for _, unit := range rec.Summary.Units {
			writeRow(unit.Title, unit.Summary)
		}
	}

	w.Flush()
	if err := w.Error(); err != nil {
		panic(err)
	}

	return sb.String()
}

// docSummary consolidates a document's per-chunk summaries into a final
// topic list (replacing the rows in the doc summary collection), then asks
// the LLM for an overall summary string and tag list.
//
// Returns ("", nil, nil) without error when the document has no chunk
// summaries at all.
func (b *RagBuilder) docSummary(docId string, rsId int32) (string, []string, error) {
	cksummary, err := b.vectordb.QueryDocSummary(docId, rsId)
	if err != nil {
		return "", nil, err
	}
	if len(cksummary) < 1 {
		// Nothing summarized yet for this document.
		return "", nil, nil
	}

	// Pass 1: feed all chunk summaries (rendered as CSV) to the LLM and
	// let it merge them into a consolidated set of topics.
	allContent := SummaryToCSV(cksummary)
	prompt, err := BuildSummaryPrompt(allContent, FINAL_SUMMARIZE_DOC_DESCRIPTIONS)
	if err != nil {
		return "", nil, err
	}
	ctx := context.Background()
	finalTopics := &v1pb.FinalGranaryTopics{}
	decode := func(msg string) error {
		return protojson.Unmarshal([]byte(msg), finalTopics)
	}
	err = b.UnmarshalLLMResponse(ctx, prompt, decode)
	if err != nil {
		return "", nil, err
	}

	// Replace the per-chunk rows with the consolidated topics; each row id
	// combines the topic index with a hash of its title + summary.
	cks := []*ragStore.ChunkSummary{}
	for index, t := range finalTopics.Topics {
		h := utils.HashString(t.Primary.Title + t.Primary.Summary)
		chunkGranary := &ragStore.ChunkSummary{
			ChunkId: fmt.Sprintf("%d_%s", index, h),
			Summary: t,
			RsId:    rsId,
		}
		cks = append(cks, chunkGranary)
	}
	// NOTE(review): results of DeleteDocSummary/InsertDocSummary (if they
	// return errors) are ignored here — confirm best-effort is intended.
	b.vectordb.DeleteDocSummary(docId, rsId)
	b.vectordb.InsertDocSummary(docId, cks, b.doEmbedding)

	// Re-read what was actually stored before the second LLM pass.
	cksummary, err = b.vectordb.QueryDocSummary(docId, rsId)
	if err != nil {
		return "", nil, err
	}
	if len(cksummary) < 1 {
		return "", nil, nil
	}

	// Pass 2: derive the overall document summary text and tags from the
	// consolidated topics.
	allContent = SummaryToCSV(cksummary)
	prompt, err = BuildSummaryPrompt(allContent, SUMMARIZE_TAG_DOC_DESCRIPTIONS)
	if err != nil {
		return "", nil, err
	}

	summaryTags := &v1pb.DocSummary{}
	decode2 := func(msg string) error {
		return protojson.Unmarshal([]byte(msg), summaryTags)
	}

	err = b.UnmarshalLLMResponse(ctx, prompt, decode2)
	if err != nil {
		return "", nil, err
	}

	return summaryTags.Summary, summaryTags.Tags, nil
}

// finalSummary merges the document-level summaries stored under docId
// (rsId 0) into granary-wide topics, persists them in a dedicated granary
// summary collection, then asks the LLM for an overall summary and tags.
//
// Returns ("", nil, nil) without error when there is nothing to summarize.
func (b *RagBuilder) finalSummary(docId string) (string, []string, error) {
	summaries, err := b.vectordb.QueryDocSummary(docId, 0)
	if err != nil {
		return "", nil, err
	}
	if len(summaries) == 0 {
		return "", nil, nil
	}

	ctx := context.Background()

	// First pass: collapse all stored summaries into a final topic list.
	prompt, err := BuildSummaryPrompt(SummaryToCSV(summaries), FINAL_SUMMARIZE_DOC_DESCRIPTIONS)
	if err != nil {
		return "", nil, err
	}
	topics := &v1pb.FinalGranaryTopics{}
	if err := b.UnmarshalLLMResponse(ctx, prompt, func(msg string) error {
		return protojson.Unmarshal([]byte(msg), topics)
	}); err != nil {
		return "", nil, err
	}

	// Persist the final topics; each id is the topic index plus a hash of
	// its title + summary.
	chunks := make([]*ragStore.ChunkSummary, 0, len(topics.Topics))
	for i, t := range topics.Topics {
		id := fmt.Sprintf("%d_%s", i, utils.HashString(t.Primary.Title+t.Primary.Summary))
		chunks = append(chunks, &ragStore.ChunkSummary{
			ChunkId: id,
			Summary: t,
		})
	}
	b.vectordb.CreateGSummaryCollection(docId, true)
	b.vectordb.InsertGSummary(docId, chunks, b.doEmbedding)

	// Re-read what was actually stored before the second pass.
	stored, err := b.vectordb.QueryGSummary(docId)
	if err != nil {
		return "", nil, err
	}
	if len(stored) == 0 {
		return "", nil, nil
	}

	// Second pass: derive the overall summary text and tag list.
	prompt, err = BuildSummaryPrompt(SummaryToCSV(stored), SUMMARIZE_TAG_DOC_DESCRIPTIONS)
	if err != nil {
		return "", nil, err
	}
	result := &v1pb.DocSummary{}
	if err := b.UnmarshalLLMResponse(ctx, prompt, func(msg string) error {
		return protojson.Unmarshal([]byte(msg), result)
	}); err != nil {
		return "", nil, err
	}
	return result.Summary, result.Tags, nil
}
