package output

import (
	"encoding/json"
	"kuafu/config"
	"kuafu/core/plugin"
	"kuafu/elastic"
	"kuafu/progress/log"
	"sync"
	"time"
)

var (
	// mapping is the Elasticsearch index mapping applied when a new
	// monthly index is created (see saveEntry). Date fields accept
	// either "yyyy-MM-dd HH:mm:ss" or epoch millis; the "sql" field is
	// full-text with the ik_smart analyzer — NOTE(review): requires the
	// IK analysis plugin on the cluster, confirm it is installed.
	mapping = `{
	"mappings":{
		"properties":{
			"time":{
				"type":"date",
                 "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
			},
			"logTime":{
				"type":"date",
                "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
			},
			"user":{
				"type":"keyword"
			},
			"host":{
				"type":"keyword"
			},
			"id":{
				"type":"keyword"
			},
			"queryTime":{
				"type":"double"
			},
			"lockTime":{
				"type":"double"
			},
			"rowsSent":{
				"type":"integer"
			},
			"rowsExamined":{
				"type":"integer"
			},
			"sql":{
				"type":"text",
                "analyzer":"ik_smart"
			},
			"instance":{
				"type":"keyword"
			}
		}
	}
   }`
)

// EsPlugin is an output plugin that buffers log entries in memory and
// bulk-indexes them into Elasticsearch, flushing when the batch reaches
// a configured size or a configured interval has elapsed.
type EsPlugin struct {
	esClient     *elastic.EsClient  // client used for index-exists checks, index creation and bulk writes
	batch        []*plugin.LogEntry // entries buffered since the last flush
	lastSaveTime time.Time          // time of the last (possibly empty) flush
	config       *config.AppConfig  // app config: flush size/interval and index-name prefix
	lock         *sync.Mutex        // guards batch and lastSaveTime
}

// Create builds an EsPlugin from the given application config and starts
// the background goroutine that periodically flushes the buffered batch.
func Create(config *config.AppConfig) *EsPlugin {
	p := &EsPlugin{
		config:       config,
		esClient:     elastic.Create(config),
		lastSaveTime: time.Now(),
		batch:        make([]*plugin.LogEntry, 0),
		lock:         &sync.Mutex{},
	}

	// Kick off the periodic flush loop.
	go refresh(p)

	return p
}

// Consume appends one log entry to the in-memory batch and triggers a
// flush once the batch reaches the configured maximum size.
//
// The batch is mutated under the plugin's mutex: the refresh goroutine
// concurrently flushes (and resets) the same slice, so an unguarded
// append here would be a data race. The lock is released before calling
// saveEntry because saveEntry acquires the same non-reentrant mutex.
func (esPlugin *EsPlugin) Consume(entry *plugin.LogEntry) {
	esPlugin.lock.Lock()
	esPlugin.batch = append(esPlugin.batch, entry)
	// Decide under the lock whether the size threshold has been reached.
	full := len(esPlugin.batch) >= esPlugin.config.Flush.FLushMaxSize
	esPlugin.lock.Unlock()

	if full {
		saveEntry(esPlugin)
	}
}

// saveEntry flushes the buffered batch to Elasticsearch as a single bulk
// request, creating the target monthly index (with the package mapping)
// if it does not exist yet. On any client error it retries forever with a
// 5-second back-off, holding the mutex the whole time — callers (Consume,
// refresh) block for the duration of the flush.
func saveEntry(esPlugin *EsPlugin) {
	esPlugin.lock.Lock()
	// defer ensures the lock is released even if a step panics.
	defer esPlugin.lock.Unlock()

	if len(esPlugin.batch) == 0 {
		// Nothing to flush; advance the timestamp so the periodic
		// refresher does not fire again immediately.
		esPlugin.lastSaveTime = time.Now()
		return
	}

	now := time.Now()

	for {
		// Index name is derived from the FIRST entry's log time only.
		// NOTE(review): a batch spanning a month boundary lands entirely
		// in that one monthly index — confirm this is acceptable.
		entry := esPlugin.batch[0]
		indexName := esPlugin.config.Es.EsPreName + time.Time(entry.LogTime).Format("200601")

		exist, err := esPlugin.esClient.Exist(indexName)
		if err != nil {
			log.Log.Errorf("elastic Exist error: %s", err)
			time.Sleep(time.Second * 5)
			continue
		}
		if !exist {
			if err = esPlugin.esClient.CreatIndex(indexName, mapping); err != nil {
				log.Log.Errorf("elastic CreatIndex error: %s", err)
				time.Sleep(time.Second * 5)
				continue
			}
		}

		dataSlice := make([]string, 0, len(esPlugin.batch))
		for _, data := range esPlugin.batch {
			dataByte, errJson := json.Marshal(data)
			if errJson != nil {
				// Skip entries that cannot be serialized instead of
				// appending an empty document to the bulk payload.
				log.Log.Errorf("json转换失败: %s", data.Time)
				continue
			}
			dataSlice = append(dataSlice, string(dataByte))
		}

		if _, err = esPlugin.esClient.BulkAdd(indexName, dataSlice); err != nil {
			log.Log.Errorf("elastic BulkAdd error: %s", err)
			time.Sleep(time.Second * 5)
			continue
		}
		break
	}

	// Reset the slice in place (keeping capacity) and record the flush
	// time taken before the (possibly long) retry loop started.
	esPlugin.batch = esPlugin.batch[:0]
	esPlugin.lastSaveTime = now
}

// refresh polls once per second and flushes the batch whenever more than
// the configured interval has passed since the last flush.
//
// lastSaveTime is written by saveEntry under the mutex, so it must also
// be read under the mutex to avoid a data race.
//
// NOTE(review): this goroutine has no stop mechanism; if plugins can be
// torn down, consider passing a context or quit channel.
func refresh(esPlugin *EsPlugin) {
	for {
		esPlugin.lock.Lock()
		elapsed := time.Since(esPlugin.lastSaveTime)
		esPlugin.lock.Unlock()

		if int(elapsed.Seconds()) >= esPlugin.config.Flush.FLushMaxInterval {
			saveEntry(esPlugin)
		}
		time.Sleep(time.Second)
	}
}
