package client

import (
	"encoding/json"
	"errors"
	"fmt"
	conf "gitlab.sensechar.com/kafkaGo/config"
	"time"

	"github.com/Shopify/sarama"
)

// KafkaProducerClient wraps sarama producers together with the broker
// list and configuration used to create them.
type KafkaProducerClient struct {
	// Cluster broker addresses (host:port).
	BrokerServers []string
	// Synchronous producer (initialized by SysProducer).
	SyncProducer  sarama.SyncProducer
	// Asynchronous producer. NOTE(review): never initialized in this
	// chunk — presumably set up elsewhere; confirm before relying on it.
	AsyncProducer sarama.AsyncProducer
	// Shared sarama configuration.
	Config        *sarama.Config
}

// Configuration reference: https://www.cnblogs.com/hongjijun/p/13584373.html
//
// NewSynKafkaClient builds a synchronous Kafka producer client from the
// given options and eagerly creates the underlying sarama sync producer.
// It panics if the producer cannot be created (e.g. brokers unreachable
// or configuration invalid).
func NewSynKafkaClient(option *conf.Option) *KafkaProducerClient {
	config := sarama.NewConfig()
	// Kafka protocol version.
	config.Version = sarama.V0_11_0_2

	// Broker-side compression stays consistent with the producer
	// (compression speed: lz4 ≈ snappy < gzip).
	config.Producer.Compression = sarama.CompressionSnappy

	// Retry settings. The idempotent producer (enabled below) requires
	// Producer.Retry.Max >= 1 — sarama's config validation rejects 0,
	// which previously made this constructor panic whenever
	// option.Retry was unset.
	config.Producer.Retry.Max = 1
	if option.Retry > 0 {
		config.Producer.Retry.Max = option.Retry
		// RetryMs is a millisecond count; a bare time.Duration(n)
		// would be interpreted as nanoseconds.
		config.Producer.Retry.Backoff = time.Duration(option.RetryMs) * time.Millisecond
	}

	// Wait for all in-sync replicas to ack (required for idempotence).
	config.Producer.RequiredAcks = sarama.WaitForAll

	// Flush batches every 500ms: if a batch has not reached its size
	// limit, the sender flushes it after this linger time anyway.
	config.Producer.Flush.Frequency = 500 * time.Millisecond
	config.Producer.Flush.MaxMessages = 1 << 29

	config.Producer.Timeout = 3 * time.Second

	// Hash-partition by message key.
	config.Producer.Partitioner = sarama.NewHashPartitioner

	// Enable the idempotent producer (exactly-once per partition).
	config.Producer.Idempotent = true
	// With idempotence enabled this must be exactly 1.
	config.Net.MaxOpenRequests = 1

	// A sync producer must have success notifications enabled.
	config.Producer.Return.Successes = true

	//sarama.MaxRequestSize = 104857600

	// SASL authentication (plain), enabled when credentials are given.
	if option.SASLUsername != "" || option.SASLPassword != "" {
		config.Net.SASL.Enable = true
		config.Net.SASL.User = option.SASLUsername
		config.Net.SASL.Password = option.SASLPassword
	}

	kafkaClient := &KafkaProducerClient{
		BrokerServers: option.Brokers,
		Config:        config,
	}
	if err := kafkaClient.SysProducer(); err != nil {
		panic(fmt.Sprintf("kafka sys err:%s\n", err))
	}
	return kafkaClient
}

// SysProducer (re)creates the synchronous sarama producer for the
// client's broker list and configuration, storing it on the client.
// On failure the stored producer is nil and the error is returned.
func (k *KafkaProducerClient) SysProducer() (err error) {
	k.SyncProducer, err = sarama.NewSyncProducer(k.BrokerServers, k.Config)
	return err
}

// BathSysPublish synchronously publishes a batch of messages.
// Messages whose payload fails JSON marshalling are skipped
// (best-effort delivery of the rest); if every message fails to
// marshal, the last marshal error is returned so the caller can see
// why nothing was sent instead of an uninformative "message is empty".
func (k *KafkaProducerClient) BathSysPublish(data ...KafkaMessage) error {
	if len(data) == 0 {
		return errors.New("data is empty")
	}
	messages := make([]*sarama.ProducerMessage, 0, len(data))
	var marshalErr error
	for _, v := range data {
		key, topic, message := k.analysis(v)
		messageByte, err := json.Marshal(message)
		if err != nil {
			// Skip the unmarshalable message but remember the error
			// in case nothing remains to send.
			marshalErr = err
			continue
		}
		messages = append(messages, &sarama.ProducerMessage{
			Topic: topic,
			Key:   sarama.ByteEncoder(key),
			Value: sarama.ByteEncoder(messageByte),
		})
	}
	if len(messages) == 0 {
		if marshalErr != nil {
			return fmt.Errorf("message is empty: %w", marshalErr)
		}
		return errors.New("message is empty")
	}
	return k.SyncProducer.SendMessages(messages)
}

// analysis splits a KafkaMessage into its routing key (used by the
// hash partitioner to pick a partition), target topic, and payload.
func (k *KafkaProducerClient) analysis(data KafkaMessage) (string, string, Message) {
	payload := Message{
		Timestamp: data.Messages.Timestamp,
		Data:      data.Messages.Data,
	}
	return data.Key, data.Topic, payload
}

// SysClose shuts down the synchronous producer. The Close error is
// deliberately discarded to preserve the original no-return signature.
func (k *KafkaProducerClient) SysClose() {
	_ = k.SyncProducer.Close()
}
