package kafka

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"sync"
	"time"

	kafka "github.com/segmentio/kafka-go"
	"github.com/sirupsen/logrus"
	"github.com/tluo-github/super-runner/common"
)

// Helpers for consuming and producing Kafka messages.

var instance *KafkaHelper


// KafkaHelper bundles one consumer-group reader and one producer behind a
// single handle. Obtain the shared instance via GetKafkaHelper and release
// its connections with Close.
type KafkaHelper struct {
	config common.KafkaConfig // configuration the instance was built from
	reader *kafka.Reader      // consumer built from config.Read
	writer *kafka.Writer      // producer built from config.Writer
}

// getKafkaReader builds a consumer-group reader for the given
// comma-separated broker list, topic and consumer group id.
func getKafkaReader(kafkaURL, topic, groupID string) *kafka.Reader {
	logrus.Info("kafka read brokers : ", kafkaURL)
	logrus.Info("kafka read topic: ", topic)
	logrus.Info("kafka read groupId : ", groupID)
	cfg := kafka.ReaderConfig{
		Brokers:  strings.Split(kafkaURL, ","),
		GroupID:  groupID,
		Topic:    topic,
		MinBytes: 10e3, // 10KB
		MaxBytes: 10e6, // 10MB
	}
	return kafka.NewReader(cfg)
}

// getKafkaWriter builds a producer for the given comma-separated broker
// list and topic, spreading messages with the least-bytes balancer.
func getKafkaWriter(kafkaURL, topic string) *kafka.Writer {
	logrus.Info("kafka writer brokers : ", kafkaURL)
	logrus.Info("kafka writer topic : ", topic)
	cfg := kafka.WriterConfig{
		Brokers:  strings.Split(kafkaURL, ","),
		Topic:    topic,
		Balancer: &kafka.LeastBytes{},
	}
	return kafka.NewWriter(cfg)
}

// Close shuts down the underlying reader and writer. Errors cannot be
// returned without breaking callers, so they are logged instead of
// being silently dropped as before.
func (kh *KafkaHelper) Close() {
	if err := kh.reader.Close(); err != nil {
		logrus.Errorln("closing kafka reader:", err)
	}
	if err := kh.writer.Close(); err != nil {
		logrus.Errorln("closing kafka writer:", err)
	}
}

// ReadMessage blocks until the next message arrives on the configured
// topic and returns its value. On read error it logs the error and
// returns nil; previously it fell through and logged a spurious
// "message at topic ..." line for the zero-value message.
func (kh *KafkaHelper) ReadMessage() []byte {
	m, err := kh.reader.ReadMessage(context.Background())
	if err != nil {
		logrus.Errorln(err)
		return nil
	}
	logrus.Printf("message at topic:%v partition:%v offset:%v %s = %s\n", m.Topic, m.Partition, m.Offset, string(m.Key), string(m.Value))
	return m.Value
}

// WriteBytes publishes the given payload as a single Kafka message on
// the writer's configured topic.
func (kh *KafkaHelper) WriteBytes(bytes []byte) error {
	msg := kafka.Message{Value: bytes}
	return kh.writer.WriteMessages(context.Background(), msg)
}

// WriteMessage publishes the given string as a single Kafka message on
// the writer's configured topic.
func (kh *KafkaHelper) WriteMessage(msg string) error {
	payload := kafka.Message{Value: []byte(msg)}
	return kh.writer.WriteMessages(context.Background(), payload)
}

// kafkaHelperOnce guards one-time construction of the package singleton.
var kafkaHelperOnce sync.Once

// GetKafkaHelper returns the process-wide KafkaHelper, constructing it
// from config on the first call. The original `if instance == nil` check
// was a data race under concurrent callers (two goroutines could both
// build an instance); sync.Once makes initialization safe. The first
// caller's config wins; later configs are ignored, as before.
func GetKafkaHelper(config common.KafkaConfig) *KafkaHelper {
	kafkaHelperOnce.Do(func() {
		instance = &KafkaHelper{
			config: config,
			reader: getKafkaReader(config.Read.BrokerList, config.Read.Topic, config.Read.GroupID),
			writer: getKafkaWriter(config.Writer.BrokerList, config.Writer.Topic),
		}
	})
	return instance
}


// KafkaJobLogMsg is the JSON payload published to the job-log topic for
// job lifecycle events ("start", "ok", "error").
type KafkaJobLogMsg struct {
	MetaData        map[string]interface{} `json:"metadata,omitempty"` // caller-supplied job metadata
	SuperRunnerName string                 `json:"super_runner_name"`  // set for "start" events
	Event           string                 `json:"event"`              // "start", "ok" or "error"
	MinioUrl        string                 `json:"minio_url"`          // artifact URL for "ok"/"error" events
	Timestamp       int64                  `json:"timestamp"`          // unix seconds at publish time
	ErrorMsg        string                 `json:"error_msg"`          // sanitized error text for "error" events
	IsSystemError   bool                   `json:"is_system_error"`    // distinguishes system vs job failures
}
// StartKafka publishes a "start" job-log event for the named job.
// name is used only for local logging; super_runner_name is embedded in
// the message payload. Marshal and write errors were silently ignored
// before; they are now logged.
func StartKafka(config common.KafkaConfig, name string, metaData map[string]interface{}, super_runner_name string) {
	msg := KafkaJobLogMsg{
		MetaData:        metaData,
		Event:           "start",
		Timestamp:       time.Now().Unix(),
		SuperRunnerName: super_runner_name,
	}
	bytes, err := json.Marshal(&msg)
	if err != nil {
		logrus.Errorln("marshaling start message:", err)
		return
	}
	logrus.WithFields(logrus.Fields{
		"name": name,
	}).Infoln("startKafka", string(bytes))
	if err := GetKafkaHelper(config).WriteBytes(bytes); err != nil {
		logrus.Errorln("writing start message to kafka:", err)
	}
}
// SucceededKafka publishes an "ok" job-log event carrying the MinIO
// artifact URL for the finished job. name is used only for local
// logging. Marshal and write errors were silently ignored before; they
// are now logged.
func SucceededKafka(config common.KafkaConfig, name string, metaData map[string]interface{}, url string) {
	msg := KafkaJobLogMsg{
		MetaData:  metaData,
		Event:     "ok",
		MinioUrl:  url,
		Timestamp: time.Now().Unix(),
	}
	bytes, err := json.Marshal(&msg)
	if err != nil {
		logrus.Errorln("marshaling ok message:", err)
		return
	}
	logrus.WithFields(logrus.Fields{
		"name": name,
	}).Infoln("succeededKafka", string(bytes))
	if err := GetKafkaHelper(config).WriteBytes(bytes); err != nil {
		logrus.Errorln("writing ok message to kafka:", err)
	}
}
// ErrorKafka publishes an "error" job-log event with a sanitized error
// message, the MinIO artifact URL and the system-error flag. name is
// used only for local logging. Marshal and write errors were silently
// ignored before; they are now logged.
func ErrorKafka(config common.KafkaConfig, name string, metaData map[string]interface{}, url string, error error, IsSystemError bool) {
	// Strip double quotes and colons in one pass (the chained
	// strings.Replace calls each rescanned the string).
	str := strings.NewReplacer("\"", " ", ":", " ").
		Replace(fmt.Sprintf("with error:%v", error))

	msg := KafkaJobLogMsg{
		MetaData:      metaData,
		Event:         "error",
		MinioUrl:      url,
		Timestamp:     time.Now().Unix(),
		ErrorMsg:      str,
		IsSystemError: IsSystemError,
	}
	bytes, err := json.Marshal(&msg)
	if err != nil {
		logrus.Errorln("marshaling error message:", err)
		return
	}
	logrus.WithFields(logrus.Fields{
		"name": name,
	}).Infoln("errorKafka", string(bytes))
	if err := GetKafkaHelper(config).WriteBytes(bytes); err != nil {
		logrus.Errorln("writing error message to kafka:", err)
	}
}