package search

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"mall/common/ctxdatax"
	"mall/common/enumx"
	"mall/common/moneyx"
	"mall/common/resultx"
	// "mall/service/backend/common/utils/pointy"
	"mall/service/blog/rpc/blogclient"
	"mall/service/forum/api/internal/svc"
	"mall/service/forum/api/internal/types"

	"github.com/sirupsen/logrus"
	"github.com/zeromicro/go-zero/core/logx"
)

// AdvancedSearchLogic carries the request-scoped context and the shared
// service dependencies (config, Elasticsearch client, blog RPC client)
// needed by the AdvancedSearch handler.
type AdvancedSearchLogic struct {
	logx.Logger
	ctx    context.Context
	svcCtx *svc.ServiceContext
}

// NewAdvancedSearchLogic builds an AdvancedSearchLogic bound to the given
// request context and shared service context, with a context-aware logger.
func NewAdvancedSearchLogic(ctx context.Context, svcCtx *svc.ServiceContext) *AdvancedSearchLogic {
	logic := AdvancedSearchLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	return &logic
}

// AdvancedSearch runs a full-text / filtered search against the
// "scentrobe-kb-entry" Elasticsearch index and maps each hit into a
// *types.PerfumeInfo. On the first result page it additionally extracts the
// highlighted keywords from the hit titles and records them through the blog
// RPC's SearchLog, returning the resulting SearchId to the caller.
//
// At least one of Search, BrandIds, NoseIds, GroupIds or NoteIds must be
// non-empty; otherwise ENTRY_NO_SEARCH_TERM_SPECIFIED is returned.
func (l *AdvancedSearchLogic) AdvancedSearch(req *types.AdvancedSearchReq) (resp *types.AdvancedSearchResp, err error) {
	search := strings.TrimSpace(req.Search)
	if len(search) == 0 && req.BrandIds == "" && req.NoseIds == "" && req.GroupIds == "" && req.NoteIds == "" {
		return nil, resultx.NewErrCode(resultx.ENTRY_NO_SEARCH_TERM_SPECIFIED)
	}

	// Normalize paging: page defaults to 1; page size is clamped to the
	// configured maximum, or falls back to the configured default.
	var page int64 = 1
	var pageSize int64
	if req.Current > 1 {
		page = req.Current
	}
	if req.PageSize > 0 { // explicit size: enforce the configured ceiling
		pageSize = req.PageSize
		if pageSize > l.svcCtx.Config.Query.MaxAmount {
			pageSize = l.svcCtx.Config.Query.MaxAmount
		}
	} else { // unspecified: use the configured default
		pageSize = l.svcCtx.Config.Query.DefaultAmount
	}

	// Map the public filter keywords onto the section slugs stored in the
	// index; unknown keywords are silently ignored.
	var filters []string
	reqFilter := strings.TrimSpace(req.Filter)
	if reqFilter != "" {
		for _, filter := range strings.Split(reqFilter, ",") {
			switch filter {
			case "brand":
				filters = append(filters, "brands")
			case "perfume":
				filters = append(filters, "perfumes")
			case "nose":
				filters = append(filters, "perfumers")
			case "group":
				filters = append(filters, "fragrance families")
			case "note":
				filters = append(filters, "notes")
			}
		}
	}

	// Build the bool/must request body: each non-empty request field adds
	// one terms (or multi_match) clause, so all of them must match.
	var buf bytes.Buffer
	var query map[string]any
	var mustConditions []map[string]any
	if len(filters) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Sections.Slug": filters,
			},
		})
	}
	if len(search) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"multi_match": map[string]any{
				"query":  search,
				"fields": []string{"Title^3", "Slug^3", "ContentHtml", "Brand.Title", "Brand.Slug", "Nose.Title", "Nose.Slug", "Group.Title", "Group.Slug", "Note.Title", "Note.Slug"},
			},
		})
	}
	if len(strings.TrimSpace(req.BrandIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Brand.Id": strings.Split(strings.TrimSpace(req.BrandIds), ","),
			},
		})
	}
	if len(strings.TrimSpace(req.NoseIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Nose.Id": strings.Split(strings.TrimSpace(req.NoseIds), ","),
			},
		})
	}
	if len(strings.TrimSpace(req.GroupIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Group.Id": strings.Split(strings.TrimSpace(req.GroupIds), ","),
			},
		})
	}
	if len(strings.TrimSpace(req.NoteIds)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Note.Id": strings.Split(strings.TrimSpace(req.NoteIds), ","),
			},
		})
	}
	if len(strings.TrimSpace(req.Seasons)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Season.Key": strings.Split(strings.TrimSpace(req.Seasons), ","),
			},
		})
	}
	if len(strings.TrimSpace(req.Genders)) > 0 {
		mustConditions = append(mustConditions, map[string]any{
			"terms": map[string]any{
				"Gender.Key": strings.Split(strings.TrimSpace(req.Genders), ","),
			},
		})
	}

	// Highlighting is only consumed for the first page (to feed the search
	// log); later pages send an empty highlight object.
	highlight := map[string]any{}
	if page == 1 {
		highlight = map[string]any{
			"pre_tags":  "<highlight>",
			"post_tags": "</highlight>",
			"fields": map[string]any{
				"Title": map[string]any{},
				"Slug":  map[string]any{},
			},
		}
	}

	query = map[string]any{
		"query": map[string]any{
			"bool": map[string]any{
				"must": mustConditions,
			},
		},
		"highlight": highlight,
		"_source": []string{"Id", "Title", "Slug", "Sections.Name", "Sections.Uri", "Media.Uid", "Media.Id", "Media.Url",
			"Meta.Key", "Meta.Value", "Relations.Id", "Relations.Title", "Relations.Slug", "Relations.Section", "Relations.GroupId",
			"Brand.Id", "Brand.Title", "Brand.Slug", "Nose.Id", "Nose.Title", "Nose.Slug", "Group.Id", "Group.Title", "Group.Slug",
			"Note.Id", "Note.Title", "Note.Slug", "Note.GroupId", "Season.Key", "Gender.Value", "Gender.Key", "Season.Value",
			"Year", "Rating",
		},
		"from": int((page - 1) * pageSize),
		"size": int(pageSize),
	}
	if err := json.NewEncoder(&buf).Encode(query); err != nil {
		logrus.Info(fmt.Sprintf("AdvancedSearch Error encoding query: %+v", err))
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	// Perform the search request.
	client := l.svcCtx.EsClient
	if client == nil {
		logrus.Info("Error getting EsClient")
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	res, err := client.Search(
		client.Search.WithContext(l.ctx),
		client.Search.WithIndex("scentrobe-kb-entry"),
		client.Search.WithBody(&buf),
		client.Search.WithTrackTotalHits(true),
		client.Search.WithPretty(),
	)
	if err != nil {
		logrus.Info(fmt.Sprintf("Error getting response: %+v", err))
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	defer res.Body.Close()
	if res.IsError() {
		var e map[string]any
		if err = json.NewDecoder(res.Body).Decode(&e); err != nil {
			logrus.Info(fmt.Sprintf("Error parsing the response body: %+v", err))
		} else if errInfo, ok := e["error"].(map[string]any); ok {
			// Log the response status plus the ES error type and reason.
			logrus.Info(fmt.Sprintf(
				"[%s] %s: %s",
				res.Status(),
				errInfo["type"],
				errInfo["reason"],
			))
		} else {
			// Error payload did not have the expected shape; dump it raw
			// instead of panicking on the type assertion.
			logrus.Info(fmt.Sprintf("[%s] unexpected error body: %+v", res.Status(), e))
		}

		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	var r map[string]any
	if err := json.NewDecoder(res.Body).Decode(&r); err != nil {
		logrus.Info(fmt.Sprintf("Error parsing the response body: %+v", err))
		return nil, resultx.NewErrCode(resultx.SERVER_COMMON_ERROR)
	}

	var list []*types.PerfumeInfo
	var total int64
	// NOTE(review): the assertions below assume the standard ES hits shape
	// (hits.total.value etc.) on a successful response — confirm ES version.
	total = int64(r["hits"].(map[string]any)["total"].(map[string]any)["value"].(float64))
	var keywords []string
	// Matches the text wrapped by the highlight tags configured above.
	// MustCompile panics on an invalid pattern, so no nil check is needed.
	reg := regexp.MustCompile(`<highlight>(?s:(.*?))</highlight>`)
	var searchId int64
	for _, hit := range r["hits"].(map[string]any)["hits"].([]any) {
		item := hit.(map[string]any)["_source"].(map[string]any)
		var sections []*types.ForumCategory
		if _, exist := item["Sections"]; exist {
			for _, section := range item["Sections"].([]any) {
				sections = append(sections, &types.ForumCategory{
					Name:  section.(map[string]any)["Name"].(string),
					Alias: section.(map[string]any)["Uri"].(string),
				})
			}
		}
		var media []*types.ForumMedia
		if _, exist := item["Media"]; exist {
			for _, v := range item["Media"].([]any) {
				m := v.(map[string]any)
				var mUid int64
				var mId int64
				var mUrl string
				if mv, ok := m["Uid"]; ok {
					mUid = int64(mv.(float64))
				}
				if mv, ok := m["Id"]; ok {
					mId = int64(mv.(float64))
				}
				if mv, ok := m["Url"]; ok {
					mUrl = mv.(string)
				}
				media = append(media, &types.ForumMedia{
					Uid: mUid,
					Id:  mId,
					Url: mUrl,
				})
			}
		}
		// Pull review count and recommended flag out of the Meta key/value
		// pairs; presence of "_recommended" alone marks the entry recommended.
		var reviewCount int32
		var recommended bool
		if _, exist := item["Meta"]; exist {
			for _, m := range item["Meta"].([]any) {
				meta := m.(map[string]any)
				val := meta["Value"].(string)
				switch meta["Key"].(string) {
				case "_review_count":
					if i, err := strconv.ParseInt(val, 10, 32); err == nil {
						reviewCount = int32(i)
					}
				case "_recommended":
					recommended = true
				}
			}
		}
		var brand []*types.PerfumeInfo
		var nose []*types.PerfumeInfo
		var group []*types.PerfumeInfo
		var topNote []*types.PerfumeInfo
		var heartNote []*types.PerfumeInfo
		var baseNote []*types.PerfumeInfo
		var perfume []*types.PerfumeInfo
		var version []*types.PerfumeInfo
		if _, exist := item["Brand"]; exist {
			for _, r := range item["Brand"].([]any) {
				relation := r.(map[string]any)
				brand = append(brand, &types.PerfumeInfo{
					Id:    int64(relation["Id"].(float64)),
					Title: relation["Title"].(string),
					Slug:  relation["Slug"].(string),
				})
			}
		}
		if _, exist := item["Nose"]; exist {
			for _, r := range item["Nose"].([]any) {
				relation := r.(map[string]any)
				nose = append(nose, &types.PerfumeInfo{
					Id:    int64(relation["Id"].(float64)),
					Title: relation["Title"].(string),
					Slug:  relation["Slug"].(string),
				})
			}
		}
		if _, exist := item["Group"]; exist {
			for _, r := range item["Group"].([]any) {
				relation := r.(map[string]any)
				group = append(group, &types.PerfumeInfo{
					Id:    int64(relation["Id"].(float64)),
					Title: relation["Title"].(string),
					Slug:  relation["Slug"].(string),
				})
			}
		}
		if _, exist := item["Note"]; exist {
			// Notes are bucketed by GroupId: 1 = top, 2 = heart, 3 = base.
			for _, r := range item["Note"].([]any) {
				relation := r.(map[string]any)
				groupId := int(relation["GroupId"].(float64))
				switch groupId {
				case 1:
					topNote = append(topNote, &types.PerfumeInfo{
						Id:    int64(relation["Id"].(float64)),
						Title: relation["Title"].(string),
						Slug:  relation["Slug"].(string),
					})
				case 2:
					heartNote = append(heartNote, &types.PerfumeInfo{
						Id:    int64(relation["Id"].(float64)),
						Title: relation["Title"].(string),
						Slug:  relation["Slug"].(string),
					})
				case 3:
					baseNote = append(baseNote, &types.PerfumeInfo{
						Id:    int64(relation["Id"].(float64)),
						Title: relation["Title"].(string),
						Slug:  relation["Slug"].(string),
					})
				}
			}
		}
		if _, exist := item["Relations"]; exist {
			for _, r := range item["Relations"].([]any) {
				relation := r.(map[string]any)
				section := relation["Section"].(string)
				switch section {
				case "perfume":
					perfume = append(perfume, &types.PerfumeInfo{
						Id:    int64(relation["Id"].(float64)),
						Title: relation["Title"].(string),
						Slug:  relation["Slug"].(string),
					})
				case "version":
					version = append(version, &types.PerfumeInfo{
						Id:    int64(relation["Id"].(float64)),
						Title: relation["Title"].(string),
						Slug:  relation["Slug"].(string),
					})
				}
			}
		}
		if page == 1 {
			if highlight, exist := hit.(map[string]any)["highlight"]; exist {
				highlightItem := highlight.(map[string]any)
				if _, ok := highlightItem["Title"]; ok {
					for _, partTitle := range highlightItem["Title"].([]any) {
						// Extract the highlighted fragments from the title.
						partTitleStr := partTitle.(string)
						regRes := reg.FindAllStringSubmatch(partTitleStr, -1)
						// Strip the <highlight></highlight> wrapper tags.
						var subTitle string
						var keyword string
						for _, subText := range regRes {
							subTitle += subText[0]
							keyword += subText[1]
						}
						// NOTE(review): only records the keyword when the
						// concatenated tagged fragments appear contiguously
						// in the title — confirm this filter is intended.
						if strings.Contains(partTitleStr, subTitle) {
							keywords = append(keywords, keyword)
						}
					}
				}
			}
		}

		var id int64
		if _, exist := item["Id"]; exist {
			id = int64(item["Id"].(float64))
		}
		var title string
		if _, exist := item["Title"]; exist {
			title = item["Title"].(string)
		}
		var slug string
		if _, exist := item["Slug"]; exist {
			slug = item["Slug"].(string)
		}
		var year string
		if _, exist := item["Year"]; exist {
			year = item["Year"].(string)
		}
		var averageRating string
		if _, exist := item["Rating"]; exist {
			averageRating = moneyx.ParseStar(item["Rating"].(string), 1)
		}

		list = append(list, &types.PerfumeInfo{
			Id:            id,
			Title:         title,
			Slug:          slug,
			Year:          year,
			ReviewCount:   reviewCount,
			AverageRating: averageRating,
			Recommended:   recommended,
			Section:       sections,
			Media:         media,
			Brand:         brand,
			Nose:          nose,
			Group:         group,
			TopNote:       topNote,
			HeartNote:     heartNote,
			BaseNote:      baseNote,
			Perfume:       perfume,
			Version:       version,
		})
	}

	// First page only: de-duplicate the highlighted keywords (preserving
	// first-seen order) and record them in the search log; a logging failure
	// is non-fatal and just leaves SearchId at zero.
	if page == 1 && len(keywords) > 0 {
		var highlights []string
		keywordMap := make(map[string]int)
		for idx, keyword := range keywords {
			if _, exist := keywordMap[keyword]; !exist {
				keywordMap[keyword] = idx
				highlights = append(highlights, keyword)
			}
		}
		userId := ctxdatax.GetUserId(l.ctx)
		res, err := l.svcCtx.BlogRpc.SearchLog(l.ctx, &blogclient.SearchLogReq{
			Keyword:    strings.Join(highlights, ","),
			UserId:     int64(userId),
			PlatformId: int32(enumx.ThirdPartyPlatformWechatMini),
		})
		if err != nil {
			logrus.Info(fmt.Sprintf("AdvancedSearch SearchLog err: %+v", err))
		} else {
			searchId = res.SearchLogId
		}
	}

	hasNextPage := page*pageSize < total

	return &types.AdvancedSearchResp{
		Current:     page,
		PageSize:    pageSize,
		List:        list,
		HasNextPage: hasNextPage,
		SearchId:    searchId,
	}, nil
}
