package driver

import (
	"errors"
	"log"
	"time"

	"github.com/IBM/sarama"
	"github.com/spf13/viper"

	"yunque-message-client/config"
)

/**
 * @Description KafkaDriver wraps a sarama client and publishes
 *              messages through a synchronous producer (see Push).
 * @Author yichen
 * @Date 2023/9/24 0024 10:52
 **/
type KafkaDriver struct {
	// Client holds the sarama client created by NewKafka; it is nil
	// until NewKafka succeeds. NOTE(review): sarama.Client is already
	// an interface, so a pointer to it is an extra indirection —
	// confirm whether callers actually need *sarama.Client here.
	Client *sarama.Client
	// name is not read or written anywhere in this file; presumably a
	// driver label — verify against other files before removing.
	name string
}

// NewKafkaDriver returns an empty KafkaDriver. The Client field stays
// nil until NewKafka is called on it (or on a driver obtained through
// NewKafkaConsumer).
func NewKafkaDriver() *KafkaDriver {
	driver := &KafkaDriver{}
	return driver
}
// SendTo publishes data to the given Kafka topic using the fixed
// message key "mes". If topic is empty, the default topic configured
// under message.kafka.topicName is used instead.
//
// Note: a fresh client is created per call via NewKafkaConsumer;
// high-volume callers may want to reuse a connected driver instead.
func (this *KafkaDriver) SendTo(topic string, data []byte) error {
	kafka := this.NewKafkaConsumer()
	if kafka == nil {
		// NewKafkaConsumer returns nil when client creation fails;
		// previously this caused a nil-pointer panic inside Push.
		return errors.New("kafka client initialization failed")
	}
	if topic == "" {
		topic = this.getTopic()
	}
	return kafka.Push(topic, "mes", data)
}

// getTopic reads the default Kafka topic name from the application
// configuration key message.kafka.topicName.
func (*KafkaDriver) getTopic() string {
	topicName := config.V().GetString("message.kafka.topicName")
	return topicName
}

// Push sends one message with the given key and value to topic using a
// synchronous producer built from the driver's client. It returns any
// error from producer creation or from the send itself.
//
// Note: a SyncProducer is created and closed on every call; callers
// sending many messages may want to hold one producer open instead.
func (this *KafkaDriver) Push(topic string, key string, value []byte) error {
	message := &sarama.ProducerMessage{
		Topic: topic,
		Key:   sarama.StringEncoder(key),
		Value: sarama.ByteEncoder(value),
	}
	producer, err := sarama.NewSyncProducerFromClient(*this.Client)
	if err != nil {
		return err
	}
	defer func() {
		// Closing the producer does not close the underlying client.
		// Log the close error instead of silently swallowing it.
		if cerr := producer.Close(); cerr != nil {
			log.Println("kafka producer close:", cerr)
		}
	}()
	// Partition and offset of the delivered message are not needed here.
	if _, _, err := producer.SendMessage(message); err != nil {
		return err
	}
	return nil
}
// NewKafkaConsumer builds a fresh KafkaDriver with an initialized
// sarama client, or nil if initialization failed. Despite its name it
// is used by SendTo to obtain a producer-capable client.
func (this *KafkaDriver) NewKafkaConsumer() *KafkaDriver {
	driver := KafkaDriver{}
	if err := driver.NewKafka(); err != nil {
		// Previously the error was discarded silently; log it so a
		// nil return can be diagnosed.
		log.Println("kafka client init:", err)
		return nil
	}
	return &driver
}
// NewKafka creates a sarama client for the brokers configured under
// subscribe.kafka.brokerIpPort and stores it on the receiver. On
// connection failure it waits one second and retries indefinitely, so
// it only returns once a client has been obtained. The original
// implementation retried via unbounded recursion, which grows the
// stack while Kafka is unreachable; this version loops instead.
func (this *KafkaDriver) NewKafka() error {
	// Named cfg so the imported config package is not shadowed.
	cfg := sarama.NewConfig()
	cfg.Consumer.Return.Errors = true
	cfg.Consumer.Offsets.AutoCommit.Enable = false
	// WaitForAll (-1): a send completes only after the in-sync
	// replicas have acknowledged the message.
	cfg.Producer.RequiredAcks = sarama.WaitForAll
	// Required for SyncProducer: success results must be delivered,
	// otherwise sends cannot be confirmed.
	cfg.Producer.Return.Successes = true
	// Hash the key so the same key always maps to the same partition,
	// preserving per-key message order.
	cfg.Producer.Partitioner = sarama.NewHashPartitioner
	for {
		client, err := sarama.NewClient(viper.GetStringSlice("subscribe.kafka.brokerIpPort"), cfg)
		if err == nil {
			this.Client = &client
			return nil
		}
		log.Println(err, "kafka 订阅，失败重连")
		// Back off for a second before reconnecting.
		time.Sleep(1 * time.Second)
	}
}
