package searchs

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"strconv"
	"strings"

	esmodel "mall/common/esmodel/blog"
	"mall/common/resultx"
	"mall/common/utils/pointy"
	// "mall/service/backend/common/utils/pointy"
	"mall/service/blog/rpc/blogclient"
	"mall/service/forum/api/internal/svc"
	"mall/service/forum/api/internal/types"

	"github.com/sirupsen/logrus"
	"github.com/zeromicro/go-zero/core/logx"
)

// AdvancedSearchLogic implements the advanced KB/blog search endpoint:
// it builds an Elasticsearch bool query from the request filters and
// enriches the resulting hits with full post data via the blog RPC.
type AdvancedSearchLogic struct {
	logx.Logger
	ctx    context.Context     // request-scoped context, propagated to ES and RPC calls
	svcCtx *svc.ServiceContext // shared dependencies (config, EsClient, BlogRpc)
}

// NewAdvancedSearchLogic constructs an AdvancedSearchLogic bound to the
// given request context and shared service context.
func NewAdvancedSearchLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AdvancedSearchLogic {
	l := &AdvancedSearchLogic{}
	l.Logger = logx.WithContext(ctx)
	l.ctx = ctx
	l.svcCtx = svcCtx
	return l
}

// AdvancedSearch runs a filterable fuzzy search over the KB-entry
// Elasticsearch index and enriches the hits with full post data fetched
// over the blog RPC. Supported features (per the original notes):
//  1. fuzzy multi-field matching on the Search term
//  2. sorting (e.g. by year) via a "field:direction" Sorter string
//  3. filtering by section ID
//  4. lookup by post ID
//  5. cross-entity filters: brand→perfume, perfume→brand/nose/note, note→perfume
func (l *AdvancedSearchLogic) AdvancedSearch(req *types.AdvancedSearchsReq) (resp *types.AdvancedSearchsResp, err error) {
	// Normalize paging: page defaults to 1; pageSize is clamped to the
	// configured maximum and falls back to the configured default.
	var page int64 = 1
	var pageSize int64
	if req.Current > 1 {
		page = req.Current
	}
	if req.PageSize > 0 { // caller-specified, enforce the upper bound
		pageSize = req.PageSize
		if pageSize > l.svcCtx.Config.Query.MaxAmount {
			pageSize = l.svcCtx.Config.Query.MaxAmount
		}
	} else { // unspecified, use the default
		pageSize = l.svcCtx.Config.Query.DefaultAmount
	}

	var buf bytes.Buffer
	var mustConditions []map[string]any
	if req.Search != nil && len(strings.TrimSpace(*req.Search)) > 0 {
		// Free-text search across title/slug/excerpt and the related
		// section/brand/nose/note fields.
		mustConditions = append(mustConditions, map[string]any{
			"multi_match": map[string]any{
				"query": *req.Search,
				"fields": []string{"Title", "Slug", "Excerpt",
					"Sections.Title", "Sections.Slug", "Brand.Title", "Brand.Slug",
					"Nose.Title", "Nose.Slug", "Note.Title", "Note.Slug",
				},
			},
		})
	}
	if req.Gender != nil && len(strings.TrimSpace(*req.Gender)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"match": map[string]any{
				"Gender": *req.Gender,
			},
		})
	}
	if req.Year != nil && len(strings.TrimSpace(*req.Year)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"match": map[string]any{
				"Year": *req.Year,
			},
		})
	}
	// The *Ids filters are comma-separated id lists translated to ES "terms".
	if req.BrandIds != nil && len(strings.TrimSpace(*req.BrandIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Brand.Id": strings.Split(strings.TrimSpace(*req.BrandIds), ","),
			},
		})
	}
	if req.NoseIds != nil && len(strings.TrimSpace(*req.NoseIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Nose.Id": strings.Split(strings.TrimSpace(*req.NoseIds), ","),
			},
		})
	}
	if req.NoteIds != nil && len(strings.TrimSpace(*req.NoteIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Note.Id": strings.Split(strings.TrimSpace(*req.NoteIds), ","),
			},
		})
	}

	if req.SectionId != nil && *req.SectionId > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"match": map[string]any{
				"Sections.Id": *req.SectionId,
			},
		})
	}
	if req.KBPostId != nil && *req.KBPostId > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"match": map[string]any{
				"Id": *req.KBPostId,
			},
		})
	}

	// Sorter is expected as "field:direction" (e.g. "Year:desc"). The
	// previous code indexed split[1] unconditionally, which panicked with
	// index-out-of-range when the value contained no colon; only apply the
	// sort when both parts are present and non-empty.
	var order = map[string]any{}
	if req.Sorter != nil && len(strings.TrimSpace(*req.Sorter)) > 0 {
		split := strings.Split(*req.Sorter, ":")
		if len(split) >= 2 && split[0] != "" && split[1] != "" {
			order = map[string]any{
				split[0]: map[string]any{
					"order": split[1],
				},
			}
		}
	}

	// Highlighting is only requested for the first page.
	highlight := map[string]any{}
	if page == 1 {
		highlight = map[string]any{
			"pre_tags":  "<highlight>",
			"post_tags": "</highlight>",
			"fields": map[string]any{
				"Title":          map[string]any{},
				"Slug":           map[string]any{},
				"Excerpt":        map[string]any{},
				"Sections.Title": map[string]any{},
				"Sections.Slug":  map[string]any{},
				"Brand.Title":    map[string]any{},
				"Brand.Slug":     map[string]any{},
				"Nose.Title":     map[string]any{},
				"Nose.Slug":      map[string]any{},
				"Note.Title":     map[string]any{},
				"Note.Slug":      map[string]any{},
			},
		}
	}

	query := map[string]any{
		"query": map[string]any{
			"bool": map[string]any{
				"must": mustConditions,
			},
		},
		"sort":      order,
		"highlight": highlight,
		"_source": []string{"Id", "Title", "Slug", "Excerpt", "Year", "Gender",
			"Sections", "Brand", "Nose", "Note",
		},
		"from": int((page - 1) * pageSize),
		"size": int(pageSize),
	}
	if err := json.NewEncoder(&buf).Encode(query); err != nil {
		logrus.Infof("AdvancedSearch Error encoding query: %+v", err)
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	// Perform the search request.
	client := l.svcCtx.EsClient
	if client == nil {
		logrus.Info("Error getting EsClient")
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	res, err := client.Search(
		client.Search.WithContext(l.ctx),
		client.Search.WithIndex("scentrobe-kb-entry"),
		client.Search.WithBody(&buf),
		client.Search.WithTrackTotalHits(true),
		client.Search.WithPretty(),
	)
	if err != nil {
		logrus.Infof("Error getting response: %+v", err)
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	defer res.Body.Close()
	if res.IsError() {
		var e map[string]any
		if err = json.NewDecoder(res.Body).Decode(&e); err != nil {
			logrus.Infof("Error parsing the response body: %+v", err)
		} else if errInfo, ok := e["error"].(map[string]any); ok {
			// Log the response status and error information.
			logrus.Infof("[%s] %s: %s", res.Status(), errInfo["type"], errInfo["reason"])
		} else {
			// The previous unchecked type assertion panicked when the
			// error payload was not an object (e.g. a bare string).
			logrus.Infof("[%s] unexpected error payload: %+v", res.Status(), e)
		}

		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	var rr esmodel.EsEntryModel
	if err := json.NewDecoder(res.Body).Decode(&rr); err != nil {
		logrus.Infof("Error parsing the response body: %+v", err)
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	// Collect hits (deduplicated by entry id), preferring the highlighted
	// title/slug/excerpt fragments over the raw source values.
	var entryMap = map[uint64]*esmodel.Entry{}
	var eid []string
	for _, v := range rr.Hits.Hits {
		if _, ok := entryMap[v.Source.Id]; !ok {
			var title, slug, excerpt string
			for _, s := range v.Highlight.Title {
				title += s
			}
			for _, s := range v.Highlight.Slug {
				slug += s
			}
			for _, s := range v.Highlight.Excerpt {
				excerpt += s
			}
			if len(strings.TrimSpace(title)) == 0 {
				title = v.Source.Title
			}
			if len(strings.TrimSpace(slug)) == 0 {
				slug = v.Source.Slug
			}
			// Excerpt intentionally stays empty when not highlighted
			// (original behavior; the source fallback was commented out).
			entryMap[v.Source.Id] = &esmodel.Entry{
				Id:      v.Source.Id,
				Title:   title,
				Slug:    slug,
				Excerpt: excerpt,
			}
			eid = append(eid, strconv.FormatUint(v.Source.Id, 10))
		}
	}
	if len(eid) > 0 {
		entryList, err := l.getKBPostListByIds(strings.Join(eid, ","))
		if err != nil {
			return nil, err
		}
		if len(entryList) > 0 {
			var list []*types.KbPostEntryInfo
			for _, v := range entryList {
				if e, ok := entryMap[*v.Id]; ok {
					v.Title = pointy.GetPointer(e.Title)
					v.Slug = pointy.GetPointer(e.Slug)
					v.Excerpt = pointy.GetPointer(e.Excerpt)
					v.Year = pointy.GetPointer(strconv.FormatUint(e.Year, 10))
					v.Gender = pointy.GetPointer(strconv.FormatUint(e.Gender, 10))
				}

				list = append(list, v)
			}
			// NOTE(review): list never exceeds pageSize, so this condition
			// can never be true and HasNextPage is effectively always false.
			// The ES total-hits count (requested via WithTrackTotalHits)
			// looks like the value to compare against — confirm the field
			// on esmodel.EsEntryModel and fix.
			var hasNextPage bool
			if page*pageSize < int64(len(list)) {
				hasNextPage = true
			}
			return &types.AdvancedSearchsResp{
				Current:     page,
				PageSize:    pageSize,
				List:        list,
				HasNextPage: hasNextPage,
			}, nil
		}
	}
	return &types.AdvancedSearchsResp{}, nil
}
// getKBPostListByIds fetches full KB post entries (with media and category
// info) from the blog RPC for a comma-separated list of entry ids, and maps
// them into API response structs.
func (l *AdvancedSearchLogic) getKBPostListByIds(ids string) (resp []*types.KbPostEntryInfo, err error) {
	preload := "blogBook:Id,Title,Slug,Date,Modified;blogBookMeta:_review_count;blogMedia:Uid,Url,Thumbnail"
	eagerLoad := "blogBook:blogBookMeta,blogMedia"

	idList := strings.Split(strings.TrimSpace(ids), ",")

	var filters []string
	filters = append(filters, "Id,in:"+ids)
	res, err := l.svcCtx.BlogRpc.GetKBPostList(l.ctx, &blogclient.KBPostListReq{
		// NOTE(review): Offset=1 — confirm whether the RPC pagination is
		// 0- or 1-based.
		Offset: pointy.GetPointer(uint32(1)),
		// Request one row per requested id; the previous hard-coded limit
		// of 10 silently truncated search pages larger than 10 entries.
		Limit:     pointy.GetPointer(uint32(len(idList))),
		Preload:   pointy.GetPointer(preload),
		EagerLoad: pointy.GetPointer(eagerLoad),
		Filter:    pointy.GetPointer(strings.Join(filters, ";")),
		WithMedia: pointy.GetPointer(uint32(1)),
		WithMeta:  pointy.GetPointer(uint32(1)),
	})
	if err != nil {
		return nil, err
	}
	if len(res.Data) > 0 {
		var kbPostMap = make(map[uint64]struct{}) // deduplicate posts by id
		for _, v := range res.Data {
			if v.Id != nil && *v.Id > 0 {
				if _, ok := kbPostMap[*v.Id]; ok {
					continue
				}
				kbPostMap[*v.Id] = struct{}{}
				// Pick a single representative media per post: a thumbnail
				// media wins; otherwise keep the first media with a URL.
				var mediaMap = make(map[uint64]*types.MediaInfo)
				for _, m := range v.Media {
					if m.Uid > 0 {
						if m.Thumbnail > 0 && len(m.Url) > 0 {
							mediaMap[*v.Id] = &types.MediaInfo{
								Uid:       pointy.GetPointer(uint64(m.Uid)),
								Thumbnail: pointy.GetPointer(true),
								Url:       pointy.GetPointer(m.Url),
							}
						} else if len(m.Url) > 0 {
							if _, ok := mediaMap[*v.Id]; !ok {
								mediaMap[*v.Id] = &types.MediaInfo{
									Uid:       pointy.GetPointer(uint64(m.Uid)),
									Thumbnail: pointy.GetPointer(false),
									Url:       pointy.GetPointer(m.Url),
								}
							}
						}
					}
				}
				var entry = &types.KbPostEntryInfo{}
				if v.Categories != nil {
					entry.Category = []*types.CategoryInfo{
						{
							Id:    pointy.GetPointer(uint64(v.Categories.Id)),
							Name:  pointy.GetPointer(v.Categories.Name),
							Slug:  pointy.GetPointer(v.Categories.Slug),
							Alias: pointy.GetPointer(v.Categories.Uri),
						},
					}
				}
				entry.Id = v.Id
				entry.Excerpt = v.Excerpt
				// Only attach media when one was found; the previous code
				// unconditionally built []*MediaInfo{mediaMap[*v.Id]},
				// which placed a nil pointer in the slice for posts with
				// no usable media.
				if media, ok := mediaMap[*v.Id]; ok {
					entry.Media = []*types.MediaInfo{media}
				}
				entry.CreatedAt = v.CreatedAt
				entry.UpdatedAt = v.UpdatedAt
				resp = append(resp, entry)
			}
		}
	}
	return resp, nil
}
