package queue

import (
	"encoding/json"
	"fmt"
	"gin/common"
	"gin/model"
	nlpTcDict "gin/tai/dict"
	"gin/tai/dict/word"
	nlpTcDictWord "gin/tai/dict/word"
	nlpTcSentence "gin/tai/sentence"
	"github.com/Gre-Z/common/jtime"
	nlp "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/nlp/v20190408"
	"time"
)

// Job is one unit of asynchronous work handled by the queue.
// Process performs the work and returns any error; HandleError is
// invoked by the queue runner with the error Process returned.
type Job interface {
	Process() error
	HandleError(error)
}

// QuerySimilarWordsJob fetches similar words for a stored sentence word
// and persists up to Number of them on the sentence-word record.
type QuerySimilarWordsJob struct {
	SentenceWordId uint64 // id of the model.SentenceWord to process
	Number         uint64 // max similar words to store on the record
}

// LexicalAnalysisSentenceJob runs lexical analysis (word segmentation and
// NER) on a sentence, optionally using a custom dictionary.
type LexicalAnalysisSentenceJob struct {
	SentenceId uint64 // id of the model.Sentence to analyse
	DictId     uint64 // optional local dict id; 0 means no custom dict
}

// SentimentAnalysisSentenceJob runs sentiment analysis on a sentence and
// stores the resulting scores back on the sentence record.
type SentimentAnalysisSentenceJob struct {
	SentenceId uint64 // id of the model.Sentence to analyse
}

// PullDictJob syncs a list of remote dictionaries into the local dict
// table, queueing a PullDictWordJob for each newly inserted dictionary.
type PullDictJob struct {
	DictList []*nlp.DictInfo // dictionaries reported by the NLP service
}

// PullDictWordJob pulls all words of one dictionary from the remote NLP
// service and inserts them locally.
type PullDictWordJob struct {
	Dict model.Dict // local dictionary record whose words are synced
}

// KeywordsExtractionSentenceJob extracts keywords from a sentence and
// stores them (JSON-encoded) on the sentence record.
type KeywordsExtractionSentenceJob struct {
	SentenceId uint64 // id of the model.Sentence to process
	Number     uint64 // max number of keywords to request
}

// 查看相似词
// Process fetches similar words for the stored sentence word from the NLP
// service, caches the word together with its full similar-word list the
// first time the word is seen, and saves up to qsw.Number similar words
// (JSON-encoded) back onto the sentence-word record.
//
// Fixes vs. original: debug println calls removed; the stale named-return
// err from the existence lookup no longer leaks into the insert branch
// (separate findErr); dead store to sentenceWordModel.SimilarWords removed
// (Update already receives the bytes); local renamed to similarWordsByte
// per Go naming convention.
func (qsw *QuerySimilarWordsJob) Process() (err error) {
	var sentenceWordModel model.SentenceWord
	sentenceWordModel, err = sentenceWordModel.FindById(qsw.SentenceWordId)
	if err != nil {
		return
	}

	// Ask for a large fixed batch (200) so the local word cache below is as
	// complete as possible; qsw.Number only limits what is stored on the
	// sentence-word record.
	var response *nlp.SimilarWordsResponse
	response, err = word.Similar(sentenceWordModel.Word, 200)
	if err != nil {
		return
	}

	var wordModel model.Word
	existRecord, findErr := wordModel.Find(common.SelectQueryParam{
		Fields: "*",
		Limit:  1,
		Offset: 0,
		Order:  "id desc",
		Filter: common.Filter{
			map[string]interface{}{
				"-eq": map[string]interface{}{
					"condition": "word = ?",
					"val":       sentenceWordModel.Word,
				},
			},
		},
	})

	// 没有保存过该词语的就写入 — first time this word is seen: cache the
	// word and its full similar-word list locally.
	if findErr != nil || existRecord.Id <= 0 {
		wordModel.Word = sentenceWordModel.Word
		id, insertErr := wordModel.Insert()
		if insertErr != nil {
			return insertErr
		}
		similarWords := response.Response.SimilarWords
		similarWordModels := make([]model.WordSimilar, len(similarWords))
		for index, similarWord := range similarWords {
			similarWordModels[index].Word = *similarWord
			similarWordModels[index].WordId = id
			similarWordModels[index].SourceWord = sentenceWordModel.Word
		}
		var similarModel model.WordSimilar
		// NOTE(review): InsertAll's result is ignored, as in the original —
		// the cache write is treated as best effort. Confirm this is intended.
		similarModel.InsertAll(&similarWordModels)
	}

	// Keep at most qsw.Number similar words for the sentence-word record.
	storeWords := response.Response.SimilarWords
	if uint64(len(storeWords)) >= qsw.Number {
		storeWords = storeWords[:qsw.Number]
	}

	var similarWordsByte []byte
	similarWordsByte, err = json.Marshal(storeWords)
	if err != nil {
		return err
	}

	_, err = sentenceWordModel.Update(model.SentenceWord{
		Id:           sentenceWordModel.Id,
		SimilarWords: similarWordsByte,
	})
	return err
}

// HandleError reports a failed similar-words job by printing the error
// to stdout highlighted in white-on-red ANSI colours.
func (qsw *QuerySimilarWordsJob) HandleError(err error) {
	const highlight = "\033[1;37;41m%s\033[0m\n"
	fmt.Printf(highlight, err.Error())
}

// 分词
// Process runs lexical analysis (word segmentation + NER) on a sentence,
// optionally using a custom dictionary, replaces any previously stored
// tokens for the sentence, and finally marks the sentence as analysed.
//
// Fixes vs. original: the dict lookup error is now checked instead of
// being silently overwritten; the NerToken update error is returned
// instead of `return nil`; the duplicate SentenceNerToken model is reused
// rather than allocated twice; stray debug println removed.
func (job *LexicalAnalysisSentenceJob) Process() (err error) {
	var sentenceModel model.Sentence
	sentenceModel, err = sentenceModel.FindById(job.SentenceId)
	if err != nil {
		return
	}

	// Optional custom dictionary for the analysis (DictId == 0 → none).
	var dictModel model.Dict
	if job.DictId > 0 {
		// BUG fix: this error was previously ignored and then overwritten
		// by the LexicalAnalysis call below.
		if err = model.FindById(&dictModel, job.DictId, nil); err != nil {
			return
		}
	}

	response, err := nlpTcDict.LexicalAnalysis(dictModel.DictId, sentenceModel.Content)
	if err != nil {
		return
	}

	// Drop any previous analysis results before inserting fresh ones.
	var deleteCondition = fmt.Sprintf("sentence_id =  %d", sentenceModel.Id)
	var sentenceWordModel = new(model.SentenceWord)
	sentenceWordModel.DeleteAll(deleteCondition)
	var sentenceNerTokenModel = new(model.SentenceNerToken)
	sentenceNerTokenModel.DeleteAll(deleteCondition)

	if len(response.Response.PosTokens) > 0 {
		var sentenceWordInsert []model.SentenceWord
		for _, posToken := range response.Response.PosTokens {
			sentenceWordInsert = append(sentenceWordInsert, model.SentenceWord{
				BeginOffset: *posToken.BeginOffset,
				Word:        *posToken.Word,
				Length:      *posToken.Length,
				Pos:         *posToken.Pos,
				SentenceId:  sentenceModel.Id,
				Created:     jtime.TstampTime(time.Now().Unix()),
			})
		}
		sentenceWordModel.InsertAll(&sentenceWordInsert)
	}

	if len(response.Response.NerTokens) > 0 {
		nerTokenByte, jsonErr := json.Marshal(response.Response.NerTokens)
		if jsonErr != nil {
			return jsonErr
		}

		// Store the raw NER payload on the sentence itself.
		if _, updateErr := sentenceModel.Update(model.Sentence{
			Id:       sentenceModel.Id,
			NerToken: nerTokenByte,
		}); updateErr != nil {
			// BUG fix: the original `return nil` here swallowed the error.
			return updateErr
		}

		var sentenceNerTokeInsert []model.SentenceNerToken
		for _, nerToken := range response.Response.NerTokens {
			sentenceNerTokeInsert = append(sentenceNerTokeInsert, model.SentenceNerToken{
				BeginOffset: *nerToken.BeginOffset,
				Word:        *nerToken.Word,
				Length:      *nerToken.Length,
				Type:        *nerToken.Type,
				SentenceId:  sentenceModel.Id,
				Created:     jtime.TstampTime(time.Now().Unix()),
			})
		}
		// Reuse the model created above instead of allocating a second one.
		sentenceNerTokenModel.InsertAll(&sentenceNerTokeInsert)
	}

	_, err = sentenceModel.Update(model.Sentence{
		Id:         sentenceModel.Id,
		IsAnalysis: "yes",
	})
	return
}

// HandleError reports a failed lexical-analysis job by printing the error
// to stdout highlighted in white-on-red ANSI colours.
func (job *LexicalAnalysisSentenceJob) HandleError(err error) {
	msg := err.Error()
	fmt.Printf("\033[1;37;41m%s\033[0m\n", msg)
}

// 情感分析 — sentiment analysis.
// Process loads the sentence, asks the NLP service for its sentiment, and
// writes the neutral/positive/negative scores plus the overall sentiment
// label back onto the sentence record.
func (job *SentimentAnalysisSentenceJob) Process() error {
	var lookup model.Sentence
	sentence, err := lookup.FindById(job.SentenceId)
	if err != nil {
		return err
	}

	result, err := nlpTcSentence.SentimentAnalysis(sentence.Content)
	if err != nil {
		return err
	}

	// Persist all four sentiment fields in one update.
	_, err = sentence.Update(model.Sentence{
		Id:        sentence.Id,
		Neutral:   *result.Response.Neutral,
		Positive:  *result.Response.Positive,
		Sentiment: *result.Response.Sentiment,
		Negative:  *result.Response.Negative,
	})
	return err
}

// HandleError reports a failed sentiment-analysis job by printing the
// error to stdout highlighted in white-on-red ANSI colours.
func (job *SentimentAnalysisSentenceJob) HandleError(err error) {
	const highlight = "\033[1;37;41m%s\033[0m\n"
	fmt.Printf(highlight, err.Error())
}
// Process syncs the remote dictionary list into the local dict table.
// Dictionaries already stored locally are skipped; new ones are inserted
// and a word-sync job is queued for each successful insert. The job
// itself never fails: per-dictionary errors are skipped (best effort).
func (job *PullDictJob) Process() error {
	for _, info := range job.DictList {
		var local model.Dict
		findErr := model.Find(&local, map[string]interface{}{
			"-eq": map[string]interface{}{
				"condition": "dict_id = ?",
				"val":       *info.Id,
			},
		}, []string{})

		// Already synced — nothing to do for this dictionary.
		if findErr == nil && local.Id > 0 {
			continue
		}

		local.Id = 0
		local.DictId = *info.Id
		local.Name = *info.Name
		local.Description = *info.Description
		if insertErr := model.Insert(&local); insertErr != nil {
			// Best effort: skip this dictionary and keep going.
			continue
		}

		// 添加一个同步词条的任务 — queue a job to pull this dict's words.
		EnQueue(&PullDictWordJob{Dict: local})
	}

	return nil
}
// HandleError reports a failed dict-sync job by printing the error to
// stdout highlighted in white-on-red ANSI colours.
func (job *PullDictJob) HandleError(err error) {
	msg := err.Error()
	fmt.Printf("\033[1;37;41m%s\033[0m\n", msg)
}

// 处理同步
// Process pulls every word of the dictionary from the remote NLP service,
// page by page (100 per page), and inserts each word locally. Inserts are
// flagged DontNeedSync so they are not echoed back to Tencent Cloud.
//
// Fix vs. original: a Select error used `continue`, which retried the
// same page forever on a persistent failure (infinite hot loop); the job
// now fails with that error instead. Failed single-word inserts remain
// best effort and are skipped.
func (job *PullDictWordJob) Process() (err error) {
	var dictWord model.DictWord
	dictWord.DictId = job.Dict.Id
	// 标记为此次不执行afterCreate — do not sync this insert back to
	// Tencent Cloud.
	dictWord.DontNeedSync = true

	const pageSize = 100
	var limit uint64 = pageSize
	keyword := ""

	for page := uint64(1); ; page++ {
		offset := (page - 1) * limit
		res, selErr := nlpTcDictWord.Select(&job.Dict.DictId, &limit, &offset, &keyword)
		if selErr != nil {
			// BUG fix: previously `continue`, which spun on the same page.
			return selErr
		}

		items := res.Response.WordItems
		if len(items) == 0 {
			break
		}

		for _, item := range items {
			dictWord.Id = 0
			dictWord.Word = *item.Text
			dictWord.Pos = *item.Pos
			// Best effort: a failed insert skips this word and keeps going.
			if _, insertErr := dictWord.Insert(); insertErr != nil {
				continue
			}
		}

		// A short page means this was the last one.
		if len(items) < pageSize {
			break
		}
	}

	return nil
}

// HandleError reports a failed dict-word-sync job by printing the error
// to stdout highlighted in white-on-red ANSI colours.
func (job *PullDictWordJob) HandleError(err error) {
	const highlight = "\033[1;37;41m%s\033[0m\n"
	fmt.Printf(highlight, err.Error())
}

// Process extracts up to Number keywords from the sentence's content and,
// when the service returns any, stores them (JSON-encoded) on the
// sentence record. Sentences with no keywords are left untouched.
func (job *KeywordsExtractionSentenceJob) Process() error {
	println(fmt.Sprintf("running KeywordsExtractionSentenceJob %d  %d", job.SentenceId, job.Number))

	var lookup model.Sentence
	sentence, err := lookup.FindById(job.SentenceId)
	if err != nil {
		return err
	}

	response, err := nlpTcSentence.KeywordsExtraction(&sentence.Content, &job.Number)
	if err != nil {
		return err
	}

	keywords := response.Response.Keywords
	if len(keywords) == 0 {
		return nil
	}

	encoded, err := json.Marshal(keywords)
	if err != nil {
		return err
	}

	_, err = sentence.Update(model.Sentence{
		Id:       sentence.Id,
		Keywords: encoded,
	})
	return err
}

// HandleError reports a failed keyword-extraction job by printing the
// error to stdout highlighted in white-on-red ANSI colours.
func (job *KeywordsExtractionSentenceJob) HandleError(err error) {
	msg := err.Error()
	fmt.Printf("\033[1;37;41m%s\033[0m\n", msg)
}
