package client

import (
	"context"
	"encoding/json"
	"fmt"
	"github.com/Shopify/sarama"
	conf "gitlab.sensechar.com/kafkaGo/config"
	"time"
)

// SubKafkaProducerClient wraps KafkaProducerClient with an asynchronous
// sarama producer plus channels for delivery notifications.
type SubKafkaProducerClient struct {
	KafkaProducerClient
	// errorChannel carries messages that failed to be produced.
	errorChannel   chan *MessageError
	// messageSucess carries successfully produced messages.
	// NOTE(review): name has a typo ("Sucess") and the channel is never
	// initialized in the visible code — confirm it is made elsewhere
	// before anything sends on it.
	messageSucess  chan sarama.ProducerMessage
	// cancel stops the client's background context.
	// NOTE(review): never assigned in the visible code; AsyClose will
	// panic if it is still nil — verify it is set elsewhere.
	cancel        context.CancelFunc
}

// 异步
// NewAynKafkaClient builds an asynchronous Kafka producer client from the
// given option: snappy compression, hash partitioning by key, idempotent
// produce with acks from all in-sync replicas, and optional SASL auth.
// It panics if the underlying sarama producer cannot be created (bad
// config or unreachable brokers).
func NewAynKafkaClient(option *conf.Option) *SubKafkaProducerClient {
	config := sarama.NewConfig()
	// Protocol version must be >= 0.11 for idempotent produce requests.
	config.Version = sarama.V0_11_0_2

	// Broker-side compression should match the producer
	// (compression speed: lz4 ≈ snappy < gzip).
	config.Producer.Compression = sarama.CompressionSnappy

	// Retries: the idempotent producer enabled below requires
	// Producer.Retry.Max >= 1, so never leave it at 0 — sarama's config
	// validation would reject it and NewAsyncProducer would fail.
	config.Producer.Retry.Max = 1
	if option.Retry > 0 {
		config.Producer.Retry.Max = option.Retry
		// RetryMs is in milliseconds; a bare time.Duration(n) would be
		// interpreted as nanoseconds.
		config.Producer.Retry.Backoff = time.Duration(option.RetryMs) * time.Millisecond
	}

	// Wait for all in-sync replicas to ack (required for idempotence).
	config.Producer.RequiredAcks = sarama.WaitForAll

	// Flush batches every 500ms: if a batch has not reached batch.size,
	// the sender flushes it after this linger time anyway.
	config.Producer.Flush.Frequency = 500 * time.Millisecond
	config.Producer.Flush.MaxMessages = 1 << 29

	config.Producer.Timeout = 3 * time.Second

	// Hash-partition by the message key.
	config.Producer.Partitioner = sarama.NewHashPartitioner

	// Enable producer-side idempotence (exactly-once per partition).
	config.Producer.Idempotent = true
	// Idempotence requires at most one in-flight request per broker.
	config.Net.MaxOpenRequests = 1

	// Deliver both success and error notifications on the producer's
	// Successes/Errors channels.
	config.Producer.Return.Successes = true
	config.Producer.Return.Errors = true

	// SASL authentication, enabled when any credential is supplied.
	if option.SASLUsername != "" || option.SASLPassword != "" {
		config.Net.SASL.Enable = true
		config.Net.SASL.User = option.SASLUsername
		config.Net.SASL.Password = option.SASLPassword
	}

	kafkaClient := &SubKafkaProducerClient{
		KafkaProducerClient: KafkaProducerClient{
			BrokerServers: option.Brokers,
			Config:        config,
		},
	}
	// Initialize both notification channels before the producer starts so
	// nothing can ever observe a nil channel.
	kafkaClient.errorChannel = make(chan *MessageError, option.BuffSize)
	kafkaClient.messageSucess = make(chan sarama.ProducerMessage, option.BuffSize)
	if err := kafkaClient.AsyProducer(); err != nil {
		panic(fmt.Sprintf("kafka sys err:%s\n", err))
	}
	return kafkaClient
}

// AsyProducer constructs the underlying sarama async producer from the
// client's broker list and config, stores it on the client, and returns
// any construction error unchanged.
func (k *SubKafkaProducerClient) AsyProducer() (err error) {
	k.AsyncProducer, err = sarama.NewAsyncProducer(k.BrokerServers, k.Config)
	return err
}

// AsyncPublish extracts key/topic/payload from the message, serializes the
// payload to JSON, and hands the result to the async producer's input
// channel. Delivery outcomes arrive later on the producer's
// Successes/Errors channels; only a marshalling failure is returned here.
func (k *SubKafkaProducerClient) AsyncPublish(data KafkaMessage) (err error) {
	key, topic, payload := k.analysis(data)
	encoded, marshalErr := json.Marshal(payload)
	if marshalErr != nil {
		return marshalErr
	}
	k.AsyncProducer.Input() <- &sarama.ProducerMessage{
		Topic: topic,
		Key:   sarama.ByteEncoder(key),
		Value: sarama.ByteEncoder(encoded),
	}
	return nil
}


// AsyClose cancels the client's background context (if one was attached)
// and shuts down the async producer, letting it drain buffered messages.
func (k *SubKafkaProducerClient) AsyClose() {
	// Close the producer on the way out so cancellation happens first.
	defer k.AsyncProducer.Close()
	// cancel is never assigned in the visible code; guard against a nil
	// CancelFunc so closing a client without a context does not panic.
	if k.cancel != nil {
		k.cancel()
	}
}
