package service

import (
	"encoding/json"
	"errors"
	"fmt"
	"gin/common"
	Mysql "gin/database"
	"gin/model"
	"gin/queue"
	nlpTcDict "gin/tai/dict"
	nlpTcSentence "gin/tai/sentence"
	"gin/validate"
	"github.com/Gre-Z/common/jtime"
	"github.com/spf13/viper"
	nlp "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/nlp/v20190408"
	"gorm.io/gorm"
	"time"
)

// CreateSentence persists a new sentence record and returns it together with
// any insert error.
func CreateSentence(data model.Sentence) (model.Sentence, error) {
	if _, err := data.Insert(); err != nil {
		return data, err
	}
	return data, nil
}

// FindSentence returns the single sentence matching the given query parameters.
func FindSentence(Param common.SelectQueryParam) (model.Sentence, error) {
	var sentence model.Sentence
	return sentence.Find(Param)
}

// SelectSentence returns the total count and the page of sentences matching
// the given query parameters.
func SelectSentence(Param *common.SelectQueryParam) (int64, []model.Sentence, error) {
	var sentence model.Sentence
	return sentence.Select(Param)
}

// UpdateSentence applies the validated update payload to an existing sentence
// and returns the updated record.
func UpdateSentence(Data validate.UpdateSentence) (model.Sentence, error) {
	var sentence model.Sentence
	return sentence.Update(model.Sentence{
		Id:        Data.Id,
		Content:   Data.Content,
		Neutral:   Data.Neutral,
		Positive:  Data.Positive,
		Sentiment: Data.Sentiment,
		Negative:  Data.Negative,
		CustomTag: Data.CustomTag,
	})
}

// DeleteSentence removes all sentences matching the given filter.
// Any deletion error from the model layer is discarded, mirroring the
// original contract of this function.
func DeleteSentence(param *common.Filter) {
	var sentence model.Sentence
	sentence.DeleteByFilter(param)
}

// LexicalAnalysisSentence runs lexical (sentence-structure) analysis over
// every sentence matched by Param, using the custom dictionary dictId when
// it is positive. When replaceOldResult >= 1 previously stored word/NER
// tokens are deleted and re-inserted from the new response. When isAsync is
// true each sentence is enqueued as a background job instead of being
// analysed inline. isSentimentAnalysis / keywordsExtraction additionally
// trigger those analyses over the same selection (results discarded here).
// Returns the raw NLP responses keyed by sentence id.
func LexicalAnalysisSentence(Param *common.SelectQueryParam, dictId uint64, replaceOldResult int8, isAsync bool, isSentimentAnalysis bool, keywordsExtraction bool) (map[uint64]nlp.LexicalAnalysisResponse, error) {

	var sentenceModel model.Sentence
	var responses = make(map[uint64]nlp.LexicalAnalysisResponse)
	total, sentenceModels, err := sentenceModel.Select(Param)
	if err != nil {
		return responses, err
	}
	if total <= 0 {
		return responses, err
	}

	// Resolve the custom dictionary when one is specified; a lookup failure
	// is deliberately ignored and the zero-value dict is used instead.
	var dictModel model.Dict
	if dictId > 0 {
		_ = model.FindById(&dictModel, dictId, nil)
	}

	// Optionally run sentiment / keyword analysis over the same selection;
	// their results and errors are intentionally discarded here.
	if isSentimentAnalysis {
		_, _ = SentimentAnalysisSentence(Param, isAsync)
	}
	if keywordsExtraction {
		_, _ = KeywordsExtractionSentence(Param, isAsync)
	}
	var sentenceNerTokenModel model.SentenceNerToken
	var sentenceWordModel model.SentenceWord

	// NOTE(review): the loop variable shadows the outer sentenceModel above.
	for index, sentenceModel := range sentenceModels {
		if isAsync {
			// Defer the work to a background job; no inline analysis is done.
			queue.EnQueue(&queue.LexicalAnalysisSentenceJob{
				SentenceId: sentenceModel.Id,
				DictId:     dictId,
			})
			continue
		}

		// Throttle to stay within the configured per-second API quota.
		if index != 0 && index%viper.GetInt("tai.maxQuerySec") == 0 {
			time.Sleep(time.Duration(1) * time.Second)
		}

		response, err := nlpTcDict.LexicalAnalysis(dictModel.DictId, sentenceModel.Content)
		if err != nil {
			// NOTE(review): this err shadows the outer err, so sentences the
			// NLP service rejects are skipped without reporting the failure.
			continue
		}
		responses[sentenceModel.Id] = *response
		if replaceOldResult >= 1 {

			// Drop previously stored tokens for this sentence before
			// re-inserting the fresh analysis result.
			var deleteCondition = fmt.Sprintf("sentence_id =  %d", sentenceModel.Id)
			sentenceWordModel.DeleteAll(deleteCondition)
			sentenceNerTokenModel.DeleteAll(deleteCondition)

			// Persist part-of-speech tokens as sentence-word rows.
			if len(response.Response.PosTokens) > 0 {
				var sentenceWordInsert []model.SentenceWord
				for _, posToken := range response.Response.PosTokens {
					sentenceWordInsert = append(sentenceWordInsert, model.SentenceWord{
						BeginOffset: *posToken.BeginOffset,
						Word:        *posToken.Word,
						Length:      *posToken.Length,
						Pos:         *posToken.Pos,
						SentenceId:  sentenceModel.Id,
						Created:     jtime.TstampTime(time.Now().Unix()),
					})
				}
				sentenceWordModel.InsertAll(&sentenceWordInsert)
			}

			// Persist NER tokens both as raw JSON on the sentence row and as
			// individual sentence-NER-token rows.
			if len(response.Response.NerTokens) > 0 {
				nerTokenByte, jsonErr := json.Marshal(response.Response.NerTokens)
				if jsonErr != nil {
					return responses, jsonErr

				}

				_, err := sentenceModel.Update(model.Sentence{
					Id:       sentenceModel.Id,
					NerToken: nerTokenByte,
				})
				if err != nil {
					return responses, err

				}

				sentenceNerTokeInsert := make([]model.SentenceNerToken, len(response.Response.NerTokens))
				for index, nerToken := range response.Response.NerTokens {
					sentenceNerTokeInsert[index] = model.SentenceNerToken{
						BeginOffset: *nerToken.BeginOffset,
						Word:        *nerToken.Word,
						Length:      *nerToken.Length,
						Type:        *nerToken.Type,
						SentenceId:  sentenceModel.Id,
						Created:     jtime.TstampTime(time.Now().Unix()),
					}
				}
				sentenceNerTokenModel.InsertAll(&sentenceNerTokeInsert)
			}

		}
		// Mark the sentence as analysed. NOTE(review): this reassigns err on
		// every iteration, so the returned error reflects only the last update.
		_, err = sentenceModel.Update(model.Sentence{
			Id:         sentenceModel.Id,
			IsAnalysis: "yes",
		})
	}

	return responses, err
}

// SentimentAnalysisSentence runs sentiment analysis over every sentence
// matched by Param and writes the neutral/positive/negative/sentiment scores
// back onto each row. When isAsync is true each sentence is enqueued as a
// background job instead of being analysed inline.
// Returns the raw NLP responses keyed by sentence id; entries are only added
// for sentences whose analysis succeeded.
func SentimentAnalysisSentence(Param *common.SelectQueryParam, isAsync bool) (map[uint64]*nlp.SentimentAnalysisResponse, error) {

	var sentenceModel model.Sentence
	res := make(map[uint64]*nlp.SentimentAnalysisResponse)
	total, sentenceModels, err := sentenceModel.Select(Param)
	if err != nil || total <= 0 {
		return res, err
	}

	for index, sentence := range sentenceModels {

		if isAsync {
			queue.EnQueue(&queue.SentimentAnalysisSentenceJob{
				SentenceId: sentence.Id,
			})
			continue
		}

		// Throttle to stay within the configured per-second API quota.
		if index != 0 && index%viper.GetInt("tai.maxQuerySec") == 0 {
			time.Sleep(time.Second)
		}

		response, nlpErr := nlpTcSentence.SentimentAnalysis(sentence.Content)
		if nlpErr != nil {
			// BUG FIX: skip without recording the response. The original
			// stored the (nil on failure) pointer into the result map before
			// this check, exposing callers that iterate the map to nil
			// dereferences.
			continue
		}
		res[sentence.Id] = response

		// Persist the scores; on failure keep processing the remaining
		// sentences and report the last update error to the caller.
		_, err = sentence.Update(model.Sentence{
			Id:        sentence.Id,
			Neutral:   *response.Response.Neutral,
			Positive:  *response.Response.Positive,
			Sentiment: *response.Response.Sentiment,
			Negative:  *response.Response.Negative,
		})
	}

	return res, err
}

// KeywordsExtractionSentence extracts keywords for every sentence matched by
// Param and stores the JSON-encoded keyword list back onto each row. When
// isAsync is true each sentence is enqueued as a background job instead of
// being analysed inline.
// Returns the raw NLP responses keyed by sentence id; entries are only added
// for sentences whose analysis succeeded.
func KeywordsExtractionSentence(Param *common.SelectQueryParam, isAsync bool) (map[uint64]*nlp.KeywordsExtractionResponse, error) {

	var sentenceModel model.Sentence
	res := make(map[uint64]*nlp.KeywordsExtractionResponse)
	// Number of keywords requested from the NLP service per sentence.
	number := uint64(5)

	total, sentenceModels, err := sentenceModel.Select(Param)
	if err != nil || total <= 0 {
		return res, err
	}

	for index, sentence := range sentenceModels {

		if isAsync {
			queue.EnQueue(&queue.KeywordsExtractionSentenceJob{
				SentenceId: sentence.Id,
				Number:     number,
			})
			continue
		}

		// Throttle to stay within the configured per-second API quota.
		if index != 0 && index%viper.GetInt("tai.maxQuerySec") == 0 {
			time.Sleep(time.Second)
		}

		response, nlpErr := nlpTcSentence.KeywordsExtraction(&sentence.Content, &number)
		if nlpErr != nil {
			// BUG FIX: skip without recording the response. The original
			// stored the (nil on failure) pointer into the result map before
			// this check, exposing callers to nil dereferences.
			continue
		}
		res[sentence.Id] = response

		if len(response.Response.Keywords) == 0 {
			continue
		}

		keywordsByte, jsonErr := json.Marshal(response.Response.Keywords)
		if jsonErr != nil {
			return res, jsonErr
		}

		if _, updateErr := sentence.Update(model.Sentence{
			Id:       sentence.Id,
			Keywords: keywordsByte,
		}); updateErr != nil {
			return res, updateErr
		}
	}

	return res, err
}

// AddAndLexicalAnalysisSentence returns the analysed sentence for the given
// content and dict id (dict_id can be specified by the caller). If a matching
// record already exists it is returned as-is; otherwise the record is
// created, lexically analysed synchronously (replacing old results), and
// reloaded with its words. Optionally attaches similar words to the sentence
// words and matches local entities (shots/lights/actions) against the local
// dictionaries. On analysis failure the freshly created record is deleted
// again before the error is returned.
func AddAndLexicalAnalysisSentence(data *validate.SentenceAddAndLexicalAnalysis) (model.Sentence, map[string]interface{}, error) {
	db := Mysql.DB

	// Fast path: reuse an existing record with identical content + dict id.
	var res model.Sentence
	result := db.Preload("SentenceWords").Preload("SentenceWords.WordSpeech").Where(model.Sentence{
		Content: data.Content,
		DictId:  data.DictId,
	}).Take(&res)

	if result.Error != nil && !errors.Is(result.Error, gorm.ErrRecordNotFound) {
		return res, nil, result.Error
	}
	if result.Error == nil && res.Id > 0 {

		// Compare against the local dictionaries to match related assets.
		matchEntities := map[string]interface{}{}
		if data.CompareLocalCustomTag {
			matchEntities = MatchLocalEntities(res.SentenceWords, data.DictId, data.SpecifiedMatchEntityType, data.MatchNumber)
		}

		return res, matchEntities, nil
	}

	res, err := CreateSentence(model.Sentence{
		Content: data.Content,
		DictId:  data.DictId,
	})
	if err != nil {
		return res, nil, err
	}

	// Analyse exactly the record we just created, synchronously and
	// replacing any previously stored result.
	sentenceFilter := common.SelectQueryParam{
		Fields: "*",
		Limit:  1,
		Offset: 0,
		Order:  "id DESC",
		Filter: common.Filter{
			Filter: map[string]interface{}{
				"-eq": map[string]interface{}{
					"condition": "id = ?",
					"val":       res.Id,
				},
			},
		},
	}
	_, err = LexicalAnalysisSentence(&sentenceFilter, data.DictId, 1, false, data.AnalysisSentiment, data.AnalysisKeyword)

	if err != nil {
		// Analysis failed: remove the record we just added.
		deleteFilter := common.Filter{Filter: map[string]interface{}{"-eq": map[string]interface{}{"condition": "id = ?", "val": res.Id}}}
		DeleteSentence(&deleteFilter)
		return res, nil, err
	}

	// Reload the sentence together with its analysed words.
	result = db.Preload("SentenceWords").Preload("SentenceWords.WordSpeech").Where(model.Sentence{
		Id: res.Id,
	}).Take(&res)
	if result.Error != nil {
		// BUG FIX: the original returned err here, which is always nil at
		// this point, silently swallowing the reload failure.
		return res, nil, result.Error
	}

	// Attach similar words for words whose part of speech was requested.
	if len(data.AnalysisSimilarWordPos) > 0 {
		similarWordIds := make([]uint64, 0)
		for _, word := range res.SentenceWords {
			for _, pos := range data.AnalysisSimilarWordPos {
				if word.Pos == pos {
					similarWordIds = append(similarWordIds, word.Id)
				}
			}
		}
		if len(similarWordIds) > 0 {
			wordsMap, queryErr := QuerySimilarWords(common.SelectQueryParam{
				Fields: "*",
				Limit:  2000,
				Offset: 0,
				Filter: common.Filter{Filter: map[string]interface{}{
					"-in": map[string]interface{}{
						"condition": "id in ?",
						"val":       similarWordIds,
					},
				}},
			}, 20, false)
			// Similar-word lookup is best-effort: on failure the sentence is
			// still returned, just without SimilarWords attached.
			if queryErr == nil {
				for index, word := range res.SentenceWords {
					similarWord, ok := wordsMap[word.Id]
					if !ok {
						continue
					}
					similarWordsByte, jsonErr := json.Marshal(similarWord)
					if jsonErr != nil {
						continue
					}
					res.SentenceWords[index].SimilarWords = similarWordsByte
				}
			}
		}
	}

	// Compare against the local dictionaries to match related assets.
	matchEntities := map[string]interface{}{}
	if data.CompareLocalCustomTag {
		matchEntities = MatchLocalEntities(res.SentenceWords, data.DictId, data.SpecifiedMatchEntityType, data.MatchNumber)
	}

	return res, matchEntities, err
}

// matchShots returns up to limit shots per word whose custom_tag list or
// tag_similar_words JSON array contains that word.
func matchShots(words []string, limit int) []model.Shot {
	var shots []model.Shot
	for _, word := range words {
		var matched []model.Shot
		// SECURITY FIX: the original concatenated the word into the SQL
		// string for the JSON_CONTAINS argument (and passed a stray extra
		// bind arg), allowing SQL injection via crafted words. Both
		// occurrences are now bound parameters; JSON_QUOTE(?) produces the
		// same `"word"` JSON scalar the concatenation built.
		result := Mysql.DB.Where(&model.Shot{}).
			Where("FIND_IN_SET(?,custom_tag) or JSON_CONTAINS(JSON_EXTRACT(tag_similar_words,'$'),JSON_QUOTE(?))", word, word).
			Limit(limit).Find(&matched)
		if result.Error != nil {
			continue
		}
		shots = append(shots, matched...)
	}
	return shots
}

// matchActions returns up to limit actions per word whose custom_tag list or
// tag_similar_words JSON array contains that word.
func matchActions(words []string, limit int) []model.Action {
	var actions []model.Action
	for _, word := range words {
		var matched []model.Action
		// SECURITY FIX: the original concatenated the word into the SQL
		// string for the JSON_CONTAINS argument (and passed a stray extra
		// bind arg), allowing SQL injection via crafted words. Both
		// occurrences are now bound parameters; JSON_QUOTE(?) produces the
		// same `"word"` JSON scalar the concatenation built.
		result := Mysql.DB.Where(&model.Action{}).
			Where("FIND_IN_SET(?,custom_tag) or JSON_CONTAINS(JSON_EXTRACT(tag_similar_words,'$'),JSON_QUOTE(?))", word, word).
			Limit(limit).Find(&matched)
		if result.Error != nil {
			continue
		}
		actions = append(actions, matched...)
	}
	return actions
}

// matchLights returns up to limit lights per word whose custom_tag list or
// tag_similar_words JSON array contains that word.
func matchLights(words []string, limit int) []model.Light {
	var lights []model.Light
	for _, word := range words {
		var matched []model.Light
		// SECURITY FIX: the original concatenated the word into the SQL
		// string for the JSON_CONTAINS argument (and passed a stray extra
		// bind arg), allowing SQL injection via crafted words. Both
		// occurrences are now bound parameters; JSON_QUOTE(?) produces the
		// same `"word"` JSON scalar the concatenation built.
		result := Mysql.DB.Where(&model.Light{}).
			Where("FIND_IN_SET(?,custom_tag) or JSON_CONTAINS(JSON_EXTRACT(tag_similar_words,'$'),JSON_QUOTE(?))", word, word).
			Limit(limit).Find(&matched)
		if result.Error != nil {
			continue
		}
		lights = append(lights, matched...)
	}
	return lights
}

// MatchLocalEntities compares the filtered sentence words against the local
// asset dictionaries and returns matching shots, lights and actions keyed
// "shots"/"lights"/"actions" (keys present only when matches were found).
// matchNumber <= 0 defaults to 20 per word; an empty specifiedMatchEntityType
// matches all three entity types.
// NOTE(review): dictId is currently unused; kept for interface compatibility.
func MatchLocalEntities(SentenceWords []model.SentenceWord, dictId uint64, specifiedMatchEntityType []string, matchNumber int) map[string]interface{} {

	matchEntities := map[string]interface{}{}
	words := sentenceWordFilter(SentenceWords)

	if matchNumber <= 0 {
		matchNumber = 20
	}
	if len(specifiedMatchEntityType) == 0 {
		specifiedMatchEntityType = []string{"shot", "light", "action"}
	}

	// The original additionally guarded with `if len(...) > 0`, which is
	// always true after the default above, and used redundant `break`
	// statements (Go switch cases never fall through); both removed.
	for _, entityType := range specifiedMatchEntityType {
		switch entityType {
		case "shot":
			if shots := matchShots(words, matchNumber); len(shots) > 0 {
				matchEntities["shots"] = shots
			}
		case "light":
			if lights := matchLights(words, matchNumber); len(lights) > 0 {
				matchEntities["lights"] = lights
			}
		case "action":
			if actions := matchActions(words, matchNumber); len(actions) > 0 {
				matchEntities["actions"] = actions
			}
		}
	}

	return matchEntities
}

// sentenceWordFilter returns the word texts of the given sentence words,
// dropping tokens whose part-of-speech tag is in the forbidden set
// (punctuation-like and non-word tags — presumably from the NLP tag set;
// TODO confirm against the Tencent NLP POS tag documentation).
func sentenceWordFilter(words []model.SentenceWord) []string {
	// Set for O(1) membership tests; the original scanned the tag list per
	// word (and used `continue` where `break` was intended, so it always
	// walked the full list).
	forbiddenPos := map[string]struct{}{
		"w": {}, "wb": {}, "wd": {}, "wf": {}, "wh": {}, "wj": {}, "wky": {},
		"wkz": {}, "wm": {}, "wn": {}, "wp": {}, "ws": {}, "wt": {}, "ww": {},
		"wyy": {}, "wyz": {}, "x": {}, "xu": {}, "xx": {},
	}

	resultWords := make([]string, 0, len(words))
	for _, word := range words {
		if _, forbidden := forbiddenPos[word.Pos]; forbidden {
			continue
		}
		resultWords = append(resultWords, word.Word)
	}
	return resultWords
}
