package service

import (
	"context"
	"encoding/json"
	"fmt"
	"go-toy/app/esmodel"
	"go-toy/app/model"
	"go-toy/database/elasticsearch"
	"go-toy/database/orm"
	"go-toy/util"
	"sync"

	"github.com/olivere/elastic/v7"
	"golang.org/x/sync/semaphore"
)

// SpiderInstance returns a fresh, zero-valued Spider service.
func SpiderInstance() *Spider {
	instance := new(Spider)
	return instance
}

// Spider is the service type for spider records: paged list queries plus
// full and incremental maintenance of the spider index in Elasticsearch.
// It embeds Base, which supplies the shared query-building fields used by
// List (db, primaryKey, offset, limit, order, columns, conditions).
type Spider struct {
	Base
}

// List returns one page of spiders plus the total row count. The related
// spider words and their converted goods are eagerly loaded on each row.
// order/columns/conditions are passed through to the shared GeneralList
// query builder.
func (s *Spider) List(offset int, limit int, order any, columns []string, conditions map[string]any) (spiders []model.Spider, total int64, err error) {
	var spider model.Spider

	// Populate the embedded Base query fields.
	s.db = orm.DB.Model(&spider)
	s.primaryKey = spider.PrimaryKey()
	s.offset, s.limit = offset, limit
	s.order = order
	s.columns = columns
	s.conditions = conditions

	// Eager-load words and the goods they were converted into.
	s.db = s.db.Preload("SpiderWords.ConvertGoods")

	if err = s.GeneralList(&total, &spiders); err != nil {
		return nil, 0, err
	}
	return spiders, total, nil
}

// FullRenewalSpiderIndex rebuilds the spider index in Elasticsearch from
// scratch: it drops and recreates the index with the spider mapping, then
// bulk-inserts every spider row page by page, loading at most 10 pages
// concurrently.
func (s *Spider) FullRenewalSpiderIndex() {
	// Recreate the index with the spider document mapping.
	index := esmodel.SpiderESIndex
	proxy := elasticsearch.GetProxy().BuildClient()
	mapping := elasticsearch.ESIndexMapping
	mapping.Mappings.Properties = esmodel.SpiderESProperties
	proxy.DeleteIndex(index)
	proxy.CreatedIndex(index, mapping)

	const limit = 500
	offset := 0
	order := "spider_id ASC"

	// Fetch the first page synchronously to learn the total row count.
	// The error was previously discarded, silently skipping the page.
	spiders, total, err := SpiderInstance().List(offset, limit, order, nil, nil)
	if err != nil {
		util.ServicePanic(fmt.Errorf("failed to list spiders: %s", err.Error()))
		return
	}
	offset += limit
	s.insertSpider2ES(proxy, index, &spiders)

	var wg sync.WaitGroup
	// Max concurrency. Acquire BEFORE spawning so at most 10 goroutines
	// exist at once; acquiring inside the goroutine (as before) spawned
	// one goroutine per remaining page immediately.
	sem := semaphore.NewWeighted(10)

	for int64(offset) < total {
		if err := sem.Acquire(context.Background(), 1); err != nil {
			util.UtilPanic(fmt.Errorf("failed to acquire semaphore: %s", err.Error()))
			return
		}
		wg.Add(1)
		go func(offset int, limit int) {
			defer wg.Done()
			defer sem.Release(1)

			spiders, _, err := SpiderInstance().List(offset, limit, order, nil, nil)
			if err != nil {
				// Surface the failure instead of silently dropping a page.
				util.UtilPanic(fmt.Errorf("failed to list spiders at offset %d: %s", offset, err.Error()))
				return
			}
			s.insertSpider2ES(proxy, index, &spiders)
		}(offset, limit)
		offset += limit
	}

	wg.Wait()
	util.PrintlnSuccess("All documents inserted.")
}

// IncrUpdateSpiderIndex incrementally updates the spider index in
// Elasticsearch: it looks up the highest spider_id already indexed and
// inserts only rows with a larger id. If the index is empty it falls back
// to a full rebuild.
func (s *Spider) IncrUpdateSpiderIndex() {
	index := esmodel.SpiderESIndex
	proxy := elasticsearch.GetProxy().BuildClient()

	// Fetch the single document with the largest primary key
	// (Sort ascending=false => descending).
	spiderModel := model.Spider{}
	sortKey := spiderModel.PrimaryKey()
	result, err := proxy.ESClient.Search().
		Index(index).
		Sort(sortKey, false).
		Size(1).
		Do(context.Background())
	if err != nil {
		util.ServicePanic(fmt.Errorf("failed to search document: %s", err.Error()))
	}

	// Empty index: nothing to diff against, rebuild everything.
	if result.TotalHits() == 0 {
		s.FullRenewalSpiderIndex()
		return
	}

	var lastSpiderId int32 = 0
	for _, hit := range result.Hits.Hits {
		var doc map[string]any
		if err := json.Unmarshal(hit.Source, &doc); err != nil {
			util.UtilPanic(err)
		}

		// json.Unmarshal decodes JSON numbers into float64; the int and
		// float32 cases are kept defensively. Note: Go switch cases do not
		// fall through — the previous empty `case float32:` silently left
		// lastSpiderId at 0, forcing a needless full re-scan.
		switch v := doc[sortKey].(type) {
		case int:
			lastSpiderId = int32(v)
		case float32:
			lastSpiderId = int32(v)
		case float64:
			lastSpiderId = int32(v)
		default:
			util.ServicePanic(fmt.Errorf("the value of %s is not int", sortKey))
			return
		}
	}

	limit := 500
	offset := 0

	// The first page also reports how many rows are still missing.
	spiders, total := s.selectSomeSpiders(offset, limit, &lastSpiderId)
	s.insertSpider2ES(proxy, index, &spiders)
	offset += limit

	for int64(offset) < total {
		spiders, _ = s.selectSomeSpiders(offset, limit, &lastSpiderId)
		s.insertSpider2ES(proxy, index, &spiders)
		offset += limit
	}

	util.PrintlnSuccess("All documents inserted.")
}

// Select spiders for incremental update: loads one page of spiders whose
// spider_id is strictly greater than *minSpiderId (0 means no lower
// bound), together with the total number of matching rows. Related words
// and converted goods are preloaded on each row.
func (s *Spider) selectSomeSpiders(offset int, limit int, minSpiderId *int32) (spiders []model.Spider, total int64) {
	modelSpider := model.Spider{}
	query := orm.DB.Model(&modelSpider)
	query = query.Preload("SpiderWords.ConvertGoods")
	if *minSpiderId != 0 {
		query = query.Where("spider_id > ?", *minSpiderId)
	}

	if err := query.Count(&total).Error; err != nil {
		util.ServicePanic(err)
	}

	// Check the page query as well — this error was previously ignored,
	// so an SQL failure silently produced an empty page.
	if err := query.Limit(limit).Offset(offset).Find(&spiders).Error; err != nil {
		util.ServicePanic(err)
	}

	return spiders, total
}

// insertSpider2ES converts the given spiders into their Elasticsearch
// document form and bulk-adds them to the index. Every spider gets a
// document even when it has no converted goods; an empty batch is skipped.
func (s *Spider) insertSpider2ES(proxy *elasticsearch.Proxy, index string, spiders *[]model.Spider) {
	docs := make([]any, 0, len(*spiders))
	for _, spider := range *spiders {
		// Collect the goods successfully converted from this spider's words.
		var goods []esmodel.SpiderCoveryGoodsESModel
		for _, word := range spider.SpiderWords {
			if word.ConvertGoodsID == 0 || word.ConvertGoods.ConvertGoodsID == 0 {
				continue
			}
			goods = append(goods, esmodel.SpiderCoveryGoodsESModel{
				GoodsId:   word.ConvertGoodsID,
				GoodsName: word.ConvertGoods.GoodsName,
			})
		}
		docs = append(docs, esmodel.SpiderESModel{
			SpiderId:           spider.SpiderID,
			Content:            spider.Content,
			CreatedTime:        spider.CreatedTime,
			SpiderGoodsConvery: goods,
		})
	}
	if len(docs) > 0 {
		proxy.AddDocuments(index, docs)
	}
}

// SearchFromEs runs a bool/should query against the spider index, matching
// the keyword against both the spider content and the converted-goods
// names, and returns the ids of the matching spiders.
// NOTE(review): the offset and limit parameters are accepted but never
// applied to the query — confirm whether pagination was intended here.
func (s *Spider) SearchFromEs(keyword string, offset int, limit int) []int32 {
	proxy := elasticsearch.GetProxy().BuildClient()

	boolQuery := elastic.NewBoolQuery().Should(
		elastic.NewMatchQuery("content", keyword),
		elastic.NewMatchQuery("spider_goods_convery.goods_name", keyword),
	)
	result := proxy.SearchDocumentSimple(esmodel.SpiderESIndex, boolQuery)

	ids := make([]int32, 0)
	for _, hit := range result.Hits.Hits {
		var doc esmodel.SpiderESModel
		if err := json.Unmarshal(hit.Source, &doc); err != nil {
			util.UtilPanic(err)
		}
		ids = append(ids, doc.SpiderId)
	}
	return ids
}
