package kbpost

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"strconv"
	"strings"

	esmodel "mall/common/esmodel/blog"
	"mall/common/globalkey"
	"mall/common/modelx"
	"mall/common/resultx"
	"mall/common/utils/pointy"
	"mall/service/blog/model"
	"mall/service/blog/rpc/internal/svc"
	"mall/service/blog/rpc/types/blog"

	"github.com/sirupsen/logrus"
	"github.com/zeromicro/go-zero/core/logx"
)

// GetKBPostListTmpLogic serves the temporary knowledge-base post list RPC.
// It is constructed per request via NewGetKBPostListTmpLogic.
type GetKBPostListTmpLogic struct {
	// ctx is the request-scoped context used for all downstream calls.
	ctx context.Context
	// svcCtx carries shared service dependencies (repositories, ES client, ...).
	svcCtx *svc.ServiceContext
	logx.Logger
}

// NewGetKBPostListTmpLogic builds a GetKBPostListTmpLogic bound to the given
// request context and shared service context, with a context-aware logger.
func NewGetKBPostListTmpLogic(ctx context.Context, svcCtx *svc.ServiceContext) *GetKBPostListTmpLogic {
	logic := &GetKBPostListTmpLogic{
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	logic.Logger = logx.WithContext(ctx)
	return logic
}

// GetKBPostListTmp returns a filtered, paginated list of knowledge-base (KB)
// posts, optionally pre-filtered through a related entity and enriched with
// eager-loaded sub-entities (media, meta, reaction logs).
//
// High-level flow:
//  1. Validate the mandatory Filter and resolve the KB post entity/repository
//     column maps.
//  2. If PreFilterEntity/PreFilter are set, resolve candidate post IDs via
//     blog book relations or reaction logs (pagination happens here).
//  3. If Search is set, narrow candidate IDs via Elasticsearch, falling back
//     to the repository keyword search when ES yields nothing or errors.
//  4. Load the posts, then eager-load media / meta / reaction-log info keyed
//     by post ID.
//  5. Assemble the response, including a best-effort total count.
func (l *GetKBPostListTmpLogic) GetKBPostListTmp(in *blog.BaseListReq) (*blog.TmpKBPostListResp, error) {
	// Filter is mandatory for this endpoint.
	if in.Filter == nil || len(*in.Filter) == 0 {
		return nil, resultx.StatusError(resultx.REUQEST_PARAM_ERROR, "")
	}

	preloadMap, eagerLoadMap := modelx.ParsePreloadAndEagerLoad(in.Preload, in.EagerLoad)
	// preloadMap example:   map[blogBook:[Id Title Slug] blogBookMeta:[mediaId mediaUrl]]
	// eagerLoadMap example: map[blogBook:[blogBookMeta]]

	kbPostEntityName := model.RpcEntityNames.BlogBook
	if _, ok := model.RpcEntityPreloadMap[kbPostEntityName]; !ok {
		// Entity not exposed over RPC: return an empty list rather than an error.
		return &blog.TmpKBPostListResp{}, nil
	}

	kbPostRepositoryName := model.RepositoryNames.BlogBook
	columnMap, ok := model.RepositoryPreloadMap[kbPostRepositoryName]
	if !ok {
		return &blog.TmpKBPostListResp{}, nil
	}

	var count int64
	var err error
	// preFilteredByReactionLog records that reaction logs were already fetched
	// during pre-filtering, so the eager load below can reuse reactionLogs.
	var preFilteredByReactionLog bool
	var reactionLogs []*model.BlogReactionLog
	// pids holds candidate post IDs produced by pre-filtering.
	var pids []any
	if in.PreFilterEntity != nil && in.PreFilter != nil {
		blogRelationEntityName := model.RpcEntityNames.BlogBookRelation
		blogReactionLogEntityName := model.RpcEntityNames.BlogReactionLog
		switch *in.PreFilterEntity {
		case blogRelationEntityName:
			// Pre-filter via book relations: collect the distinct from-entry IDs.
			if _, ok := model.RpcEntityPreloadMap[blogRelationEntityName]; ok {
				blogRelationRepositoryName := model.RepositoryNames.BlogBookRelation
				if preColumnMap, ok := model.RepositoryPreloadMap[blogRelationRepositoryName]; ok {
					filterMap := modelx.ParseFilter(in.PreFilter, preColumnMap)
					logrus.Info(fmt.Sprintf("GetKBPostListTmp preFilterMap: %+v", filterMap))
					// e.g. map[to_entry_id:map[equalTo:[46665]] to_section_id:map[equalTo:[18]]]
					if filterMap == nil {
						return nil, resultx.StatusError(resultx.REUQEST_PARAM_ERROR, "")
					}

					res, _, err := l.svcCtx.KBEntry.GetRelationshipList(l.ctx, model.M{
						"offset": in.Offset,
						"limit":  in.Limit,
					}, 2, filterMap)
					if err != nil {
						return nil, err
					}

					if len(res) == 0 {
						return &blog.TmpKBPostListResp{}, nil
					}

					// Deduplicate IDs while preserving order.
					pidMap := make(map[uint64]struct{})
					for _, v := range res {
						if _, ok := pidMap[v.FromEntryID]; ok {
							continue
						}

						pidMap[v.FromEntryID] = struct{}{}
						pids = append(pids, v.FromEntryID)
					}
				}
			}
		case blogReactionLogEntityName:
			// Pre-filter via reaction logs: collect distinct, enabled post IDs
			// and keep the matching logs for reuse by the eager load below.
			if _, ok := model.RpcEntityPreloadMap[blogReactionLogEntityName]; ok {
				blogReactionLogRepositoryName := model.RepositoryNames.BlogReactionLog
				if preColumnMap, ok := model.RepositoryPreloadMap[blogReactionLogRepositoryName]; ok {
					filterMap := modelx.ParseFilter(in.PreFilter, preColumnMap)
					// e.g. map[status:map[equalTo:[1]] type:map[equalTo:[want]] user_id:map[equalTo:[10021]]]
					if filterMap == nil {
						return nil, resultx.StatusError(resultx.REUQEST_PARAM_ERROR, "")
					}

					res, _, err := l.svcCtx.Reaction.GetList(l.ctx, model.M{
						"offset": in.Offset,
						"limit":  in.Limit,
						"latest": "1",
					}, 2, filterMap)
					if err != nil {
						return nil, err
					}

					if len(res) == 0 {
						return &blog.TmpKBPostListResp{}, nil
					}

					pidMap := make(map[uint64]struct{})
					preFilteredByReactionLog = true
					// count tracks the number of distinct enabled posts found.
					count = 0
					for _, v := range res {
						if int32(v.Status) != globalkey.StatusEnabled {
							continue
						}

						if _, ok := pidMap[v.PostID]; ok { // dedupe by post ID
							continue
						}

						pidMap[v.PostID] = struct{}{}
						pids = append(pids, v.PostID)
						count += 1
						reactionLogs = append(reactionLogs, v)
					}
				}
			}
		}
	}

	// Columns to select for the posts themselves.
	var selectCols []string
	if preloadFields, ok := preloadMap[kbPostEntityName]; ok {
		selectCols = modelx.ParseSelectColumns(preloadFields, columnMap)
	}

	filterMap := modelx.ParseFilter(in.Filter, columnMap)
	// e.g. map[ID:map[in:[47067 40101]] post_status:map[equalTo:[1]]]
	if filterMap == nil {
		// An unparsable filter is only acceptable when pre-filtering already
		// produced candidate IDs; then fall back to "enabled posts only".
		if len(pids) == 0 {
			return nil, resultx.StatusError(resultx.REUQEST_PARAM_ERROR, "")
		}

		filterMap = make(map[string]map[string][]any)
		filterMap[model.BlogKnowledgeBasisColumns.PostStatus] = map[string][]any{
			"equalTo": {globalkey.StatusEnabled},
		}
	}

	orderBy := modelx.ParseSorter(in.Sorter, columnMap)
	if len(orderBy) == 0 {
		orderBy = model.BlogKnowledgeBasisColumns.ID + " DESC"
	}

	// Optional keyword search: prefer Elasticsearch IDs; degrade to the
	// repository's keyword search when ES yields nothing or errors.
	var keyword string
	var eids []any
	if in.Search != nil && len(strings.TrimSpace(*in.Search)) > 0 {
		var page uint64 = 1
		var pageSize uint64 = 10
		if in.Page != nil && *in.Page > 1 {
			page = *in.Page
		}
		// Fix: accept any positive page size. The previous check ("> 1")
		// silently replaced a requested page size of 1 with the default 10.
		if in.PageSize != nil && *in.PageSize > 0 {
			pageSize = *in.PageSize
		}
		ids, err := l.search(strings.TrimSpace(*in.Search), page, pageSize)
		if err != nil {
			// Best-effort: an ES failure is logged, not returned.
			logx.Infow("GetKBPostListTmp search err", logx.Field("detail", err.Error()))
		}
		if len(ids) > 0 {
			for _, id := range ids {
				if id > 0 {
					eids = append(eids, id)
				}
			}
		} else {
			keyword = strings.TrimSpace(*in.Search)
		}
	}

	var res []*model.BlogKnowledgeBasis
	if len(pids) > 0 {
		// Pre-filtered path: pagination already happened during pre-filtering,
		// so fetch all candidates (-1 = no paging/count).
		filterMap[model.BlogKnowledgeBasisColumns.ID] = map[string][]any{
			"in": pids,
		}
		res, _, err = l.svcCtx.KBEntry.GetList(l.ctx, model.M{
			"orderBy": orderBy,
			"select":  strings.Join(selectCols, ","),
		}, -1, filterMap)
	} else {
		if len(eids) > 0 {
			filterMap[model.BlogKnowledgeBasisColumns.ID] = map[string][]any{
				"in": eids,
			}
		}
		res, count, err = l.svcCtx.KBEntry.GetList(l.ctx, model.M{
			"offset":  in.Offset,
			"limit":   in.Limit,
			"search":  keyword,
			"orderBy": orderBy,
			"select":  strings.Join(selectCols, ","),
		}, 2, filterMap)
	}
	if err != nil {
		return nil, err
	}

	hasNextPage, hasPreviousPage := modelx.ParseCount(count)
	resp := &blog.TmpKBPostListResp{
		HasNextPage:     &hasNextPage,
		HasPreviousPage: &hasPreviousPage,
	}

	var total int64
	if len(res) > 0 {
		ids := make([]any, 0, len(res))
		for _, v := range res {
			ids = append(ids, v.ID)
		}

		// Resolve which sub-entities were requested for eager loading.
		subEntitySettingMap := make(map[string]map[string]*model.ColumnSetting)
		if eagerLoadEntities, ok := eagerLoadMap[kbPostEntityName]; ok {
			for _, entity := range eagerLoadEntities {
				if entity == "total" {
					// Best-effort total over the full (un-paginated) filter.
					// A scoped error keeps the outer err clean (previously a
					// failed count left the outer err non-nil and silently
					// ignored); total simply stays 0 on failure.
					if _, t, terr := l.svcCtx.KBEntry.GetList(l.ctx, model.M{}, 0, filterMap); terr == nil {
						total = t
					}
					continue
				}

				if entitySettingMap, ok := model.RpcEntityPreloadMap[entity]; ok {
					subEntitySettingMap[entity] = entitySettingMap
				}
			}
		}

		// Eager-load thumbnail media, grouped by post ID.
		idMediaMap := make(map[uint64][]*blog.TmpMediaInfo)
		blogMediaEntityName := model.RpcEntityNames.BlogMedia
		if _, ok := subEntitySettingMap[blogMediaEntityName]; ok {
			blogMediaRepositoryName := model.RepositoryNames.BlogMedia
			subPreloadFields, hasSubPreloadFields := preloadMap[blogMediaEntityName]
			subColumnMap, hasSubColumnMap := model.RepositoryPreloadMap[blogMediaRepositoryName]
			if hasSubPreloadFields && hasSubColumnMap {
				subSelectCols := modelx.ParseSelectColumns(subPreloadFields, subColumnMap)

				subFilterMap := make(map[string]map[string][]any)
				subFilterMap[model.BlogAttachmentRelationshipColumns.ObjectID] = map[string][]any{
					"in": ids,
				}
				subFilterMap[model.BlogAttachmentRelationshipColumns.ObjectType] = map[string][]any{
					"equalTo": {globalkey.AttachmentLogTypeKnowledgeBase},
				}
				subFilterMap[model.BlogAttachmentRelationshipColumns.AttachmentThumbnail] = map[string][]any{
					"equalTo": {1},
				}
				// ObjectID is required to group results back onto posts.
				subSelectCols = append(subSelectCols, model.BlogAttachmentRelationshipColumns.ObjectID)
				list, _, err := l.svcCtx.KBEntry.GetMedia(l.ctx, model.M{
					"orderBy": model.BlogAttachmentRelationshipColumns.AttachmentOrder,
					"select":  strings.Join(subSelectCols, ","),
				}, -1, subFilterMap)
				if err != nil {
					return nil, err
				}

				// NOTE(review): taking &v.Field below assumes list elements
				// are pointers or the module targets Go >= 1.22 (per-iteration
				// loop variables) — confirm against the repository return type.
				for _, v := range list {
					idMediaMap[v.ObjectID] = append(idMediaMap[v.ObjectID], &blog.TmpMediaInfo{
						Uid:       &v.AttachmentRelationshipID,
						Id:        pointy.GetPointer(strconv.FormatInt(int64(v.AttachmentID), 10)),
						Thumbnail: pointy.GetPointer(uint32(v.AttachmentThumbnail)),
						Order:     pointy.GetPointer(uint32(v.AttachmentOrder)),
						Uri:       &v.AttachmentSource,
						Url:       &v.AttachmentImageURL,
						AltText:   &v.AttachmentImageAlt,
						MetaData:  &v.AttachmentImageMeta,
					})
				}
			}
		}

		// Eager-load requested meta values, grouped by post ID.
		idMetaMap := make(map[uint64][]*blog.MetaInfo)
		blogMetaEntityName := model.RpcEntityNames.BlogBookMeta
		if entitySettingMap, ok := subEntitySettingMap[blogMetaEntityName]; ok {
			if subPreloadFields, ok := preloadMap[blogMetaEntityName]; ok {
				// Map requested RPC fields to their stored meta keys (and back).
				var metaKeys []any
				metaKeyMap := make(map[string]string)
				for _, field := range subPreloadFields {
					if fieldSetting, ok := entitySettingMap[field]; ok && len(fieldSetting.Name) > 0 {
						metaKeys = append(metaKeys, fieldSetting.Name)
						metaKeyMap[fieldSetting.Name] = field
					}
				}
				if len(metaKeys) > 0 {
					subFilterMap := make(map[string]map[string][]any)
					subFilterMap[model.BlogKnowledgeBaseMetumColumns.KnowledgeBaseID] = map[string][]any{
						"in": ids,
					}
					subFilterMap[model.BlogKnowledgeBaseMetumColumns.MetaKey] = map[string][]any{
						"in": metaKeys,
					}
					list, _, err := l.svcCtx.KBEntry.GetMeta(l.ctx, model.M{
						"orderBy": model.BlogKnowledgeBaseMetumColumns.MetaID + " DESC",
					}, -1, subFilterMap)
					if err != nil {
						return nil, err
					}

					// Deduplicate by (knowledge base ID, meta key). The list is
					// ordered by meta_id DESC, so the first row seen per key is
					// the latest value. (Fix: the previous version keyed the
					// dedup map by the unique meta_id — so nothing was ever
					// deduplicated — and never recorded the first occurrence.)
					seen := make(map[uint64]map[string]struct{})
					for _, v := range list {
						key := v.MetaKey
						if len(key) == 0 || !v.MetaValue.Valid {
							continue
						}

						if _, ok := seen[v.KnowledgeBaseID]; !ok {
							seen[v.KnowledgeBaseID] = make(map[string]struct{})
						}
						if _, dup := seen[v.KnowledgeBaseID][key]; dup {
							continue
						}
						seen[v.KnowledgeBaseID][key] = struct{}{}

						if field, ok := metaKeyMap[key]; ok {
							idMetaMap[v.KnowledgeBaseID] = append(idMetaMap[v.KnowledgeBaseID], &blog.MetaInfo{
								Id:    &v.MetaID,
								Key:   pointy.GetPointer(field),
								Value: pointy.GetPointer(v.MetaValue.String),
							})
						}
					}
				}
			}
		}

		// Eager-load reaction logs, one per post ID (last log seen wins).
		idReactionLogMap := make(map[uint64]*blog.ReactionLogInfo)
		reactionLogEntityName := model.RpcEntityNames.BlogReactionLog
		if _, ok := subEntitySettingMap[reactionLogEntityName]; ok {
			reactionLogRepositoryName := model.RepositoryNames.BlogReactionLog
			subPreloadFields, hasSubPreloadFields := preloadMap[reactionLogEntityName]
			subColumnMap, hasSubColumnMap := model.RepositoryPreloadMap[reactionLogRepositoryName]
			if hasSubPreloadFields && hasSubColumnMap {
				subSelectCols := modelx.ParseSelectColumns(subPreloadFields, subColumnMap)

				// Skip the query when pre-filtering already fetched the logs.
				if !preFilteredByReactionLog {
					subFilterMap := make(map[string]map[string][]any)
					subFilterMap[model.BlogReactionLogColumns.PostID] = map[string][]any{
						"in": ids,
					}
					subFilterMap[model.BlogReactionLogColumns.PostType] = map[string][]any{
						"equalTo": {globalkey.AttachmentLogTypeKnowledgeBase},
					}
					// NOTE(review): orderBy here is the post-level sort (a blog
					// post column); confirm the reaction repository accepts it.
					reactionLogs, _, err = l.svcCtx.Reaction.GetList(l.ctx, model.M{
						"orderBy": orderBy,
						"latest":  "1",
						"select":  strings.Join(subSelectCols, ","),
					}, -1, subFilterMap)
					if err != nil {
						return nil, err
					}
				}

				for _, v := range reactionLogs {
					idReactionLogMap[v.PostID] = &blog.ReactionLogInfo{
						Id:         &v.ID,
						PostId:     &v.PostID,
						PostType:   pointy.GetPointer(uint32(v.PostType)),
						UserId:     &v.UserID,
						Type:       &v.Type,
						Status:     pointy.GetPointer(uint32(v.Status)),
						Count:      &v.Count,
						CreatedAt:  pointy.GetPointer(modelx.FormatTime(v.CreatedAt, "")),
						CreateDate: pointy.GetPointer(uint32(v.CreateDate)),
					}
				}
			}
		}

		// Assemble the response items; missing map entries yield nil fields.
		for _, v := range res {
			resp.List = append(resp.List, &blog.KBPostInfo{
				Id:            &v.ID,
				Title:         &v.PostTitle,
				Slug:          &v.PostName,
				Excerpt:       &v.PostExcerpt,
				CommentStatus: pointy.GetPointer(uint32(v.CommentStatus)),
				Sort:          pointy.GetPointer(uint32(v.MenuOrder)),
				Uri:           &v.GUID,
				AuthorId:      &v.PostAuthorID,
				ParentId:      &v.PostParentID,
				SectionId:     &v.SectionID,
				Sticky:        pointy.GetPointer(uint32(v.StickyType)),
				CreatedAt:     pointy.GetPointer(modelx.FormatTime(v.PostDate, "")),
				UpdatedAt:     pointy.GetPointer(modelx.FormatTime(v.PostModifiedDate, "")),
				Meta:          idMetaMap[v.ID],
				Media:         idMediaMap[v.ID],
				ReactionLog:   idReactionLogMap[v.ID],
			})
		}
	}

	resp.Total = uint64(total)

	return resp, nil
}

// search queries the "scentrobe-kb-entry" Elasticsearch index for entries
// whose Title or Slug fuzzily matches keyword and returns the matching entry
// IDs for the requested page. page is 1-based; page 0 is treated as page 1
// (guarding the unsigned (page-1)*pageSize computation against underflow).
func (l *GetKBPostListTmpLogic) search(keyword string, page uint64, pageSize uint64) ([]uint64, error) {
	if page == 0 {
		page = 1
	}

	// Single fuzzy multi_match over the title and slug fields.
	mustConditions := []map[string]any{
		{
			"multi_match": map[string]any{
				"query": keyword,
				"fields": []string{
					"Title", "Slug",
				},
				"fuzziness": "AUTO",
				"operator":  "or",
			},
		},
	}

	query := map[string]any{
		"query": map[string]any{
			"bool": map[string]any{
				"must": mustConditions,
			},
		},
		// Only the entry ID is needed from each hit.
		"_source": []string{"Id"},
		"from":    int((page - 1) * pageSize),
		"size":    int(pageSize),
	}

	var buf bytes.Buffer
	if err := json.NewEncoder(&buf).Encode(query); err != nil {
		logrus.Info(fmt.Sprintf("AdvancedSearch Error encoding query: %+v", err))
		logx.Infow("AdvancedSearch encoding err", logx.Field("detail", err.Error()))
		return nil, err
	}

	// Perform the search request.
	client := l.svcCtx.EsClient
	if client == nil {
		logrus.Info("Error getting EsClient")
		logx.Infow("Error getting EsClient")
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	res, err := client.Search(
		client.Search.WithContext(l.ctx),
		client.Search.WithIndex("scentrobe-kb-entry"),
		client.Search.WithBody(&buf),
		client.Search.WithTrackTotalHits(true),
		client.Search.WithPretty(),
	)
	if err != nil {
		logrus.Info(fmt.Sprintf("Error getting response: %+v", err))
		logx.Infow("AdvancedSearch getting response err", logx.Field("detail", err.Error()))
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	defer res.Body.Close()
	if res.IsError() {
		// Best-effort decode of the ES error payload for diagnostics.
		var e map[string]any
		if err = json.NewDecoder(res.Body).Decode(&e); err != nil {
			logrus.Info(fmt.Sprintf("Error parsing the response body1: %+v", err))
			logx.Infow("AdvancedSearch parsing the err response body err", logx.Field("detail", err.Error()))
		} else {
			logrus.Info(fmt.Sprintf("parsing_exception : %+v", e))
			// Fix: guard the type assertion — ES is not guaranteed to return
			// a map under "error", and the previous unchecked assertion could
			// panic on an unexpected payload shape.
			if errInfo, ok := e["error"].(map[string]any); ok {
				logrus.Info(fmt.Sprintf(
					"[%s] %s: %s",
					res.Status(),
					errInfo["type"],
					errInfo["reason"],
				))
			} else {
				logrus.Info(fmt.Sprintf("[%s] unexpected error payload: %+v", res.Status(), e["error"]))
			}
			// e.g. [400 Bad Request] parsing_exception: [_source] query malformed
		}

		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	// Decode into the typed ES model and collect the hit IDs.
	var rr esmodel.EsEntryModel
	if err := json.NewDecoder(res.Body).Decode(&rr); err != nil {
		logrus.Info(fmt.Sprintf("Error parsing the response body3: %+v", err))
		logx.Infow("AdvancedSearch parsing the response body err", logx.Field("detail", err.Error()))
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	var eids []uint64
	for _, v := range rr.Hits.Hits {
		eids = append(eids, v.Source.Id)
	}

	return eids, nil
}
