package kafka

import (
	"fmt"

	log "github.com/sirupsen/logrus"

	"github.com/Shopify/sarama"
)

// ProcucerConf holds the broker, auth, and routing settings shared by the
// sync and async producers.
// NOTE(review): the name looks like a typo for "ProducerConf", but it is
// exported, so renaming it would break existing callers.
type ProcucerConf struct {
	Address  []string // broker addresses, "host:port"
	User     string   // SASL username
	Password string   // SASL password
	Ack      string   // "ALL" = all replicas, "NONE" = no ack; anything else waits for the leader only (default)
	Topic    string   // topic messages are published to
	Key      string   // optional message key; identical keys go to the same partition, preserving order
}

//同步客户端
type SyncProducer struct {
	Conf   ProcucerConf
	Client sarama.SyncProducer
}

// InitSyncProducer builds the underlying sarama sync producer from s.Conf
// and stores it in s.Client. It must be called before Write.
//
// Conf.Ack selects the required broker acknowledgement: "ALL" waits for all
// in-sync replicas, "NONE" waits for nobody, anything else (the default)
// waits for the partition leader only.
//
// Fixes over the original: the sarama client is closed when producer
// creation fails (it was leaked before), s.Client is only assigned on
// success, and errors are wrapped with context.
func (s *SyncProducer) InitSyncProducer() error {
	config := sarama.NewConfig()
	switch s.Conf.Ack {
	case "ALL":
		config.Producer.RequiredAcks = sarama.WaitForAll // every in-sync replica must ack
	case "NONE":
		config.Producer.RequiredAcks = sarama.NoResponse // fire and forget
	default:
		config.Producer.RequiredAcks = sarama.WaitForLocal // leader ack only
	}
	// NOTE(review): SASL is enabled unconditionally, even with empty
	// credentials — confirm all target clusters require SASL.
	config.Net.SASL.Enable = true
	config.Net.SASL.Handshake = true
	config.Net.SASL.User = s.Conf.User
	config.Net.SASL.Password = s.Conf.Password
	// Required by sarama: a SyncProducer must have Return.Successes enabled.
	config.Producer.Return.Successes = true

	client, err := sarama.NewClient(s.Conf.Address, config)
	if err != nil {
		log.Errorf("unable to create kafka client: %v", err)
		return fmt.Errorf("creating kafka client: %w", err)
	}
	producer, err := sarama.NewSyncProducerFromClient(client)
	if err != nil {
		// Close the client so its broker connections are not leaked.
		client.Close()
		return fmt.Errorf("creating sync producer: %w", err)
	}
	s.Client = producer
	return nil
}
// Write publishes p to the configured topic and returns the partition and
// offset assigned by the broker. When Conf.Key is non-empty it is attached
// as the message key so that all messages with the same key land on the
// same partition, preserving their relative order.
func (s *SyncProducer) Write(p []byte) (partition int32, offset int64, err error) {
	msg := &sarama.ProducerMessage{
		Topic: s.Conf.Topic,
		Value: sarama.ByteEncoder(p),
	}
	if key := s.Conf.Key; key != "" {
		msg.Key = sarama.StringEncoder(key)
	}
	return s.Client.SendMessage(msg)
}

//异步客户端
type AsyncProducer struct {
	Conf   ProcucerConf
	Client sarama.AsyncProducer
}

// InitAsyncProducer builds the underlying sarama async producer from s.Conf
// and stores it in s.Client.
//
// Fixes over the original: client-creation failure used log.Fatalf, which
// terminated the entire process from library code — it now returns the
// error to the caller; the sarama client is closed when producer creation
// fails; errors are wrapped with context.
//
// NOTE(review): sarama's async producer emits failures on Client.Errors();
// callers presumably must drain that channel or sends will eventually
// block — confirm against the consuming code.
func (s *AsyncProducer) InitAsyncProducer() error {
	config := sarama.NewConfig()
	// NOTE(review): SASL is enabled unconditionally, even with empty
	// credentials — confirm all target clusters require SASL.
	config.Net.SASL.Enable = true
	config.Net.SASL.Handshake = true
	config.Net.SASL.User = s.Conf.User
	config.Net.SASL.Password = s.Conf.Password

	client, err := sarama.NewClient(s.Conf.Address, config)
	if err != nil {
		log.Errorf("unable to create kafka client: %v", err)
		return fmt.Errorf("creating kafka client: %w", err)
	}
	producer, err := sarama.NewAsyncProducerFromClient(client)
	if err != nil {
		// Close the client so its broker connections are not leaked.
		client.Close()
		return fmt.Errorf("creating async producer: %w", err)
	}
	s.Client = producer
	return nil
}

//同步写
func NewKafkaSync(address []string, topic, user, password string, key string, ack string) (producer SyncProducer, err error) {
	kconf := ProcucerConf{
		Address:  address,
		User:     user,
		Password: password,
		Topic:    topic,
		Key:      key,
		Ack:      ack,
	}
	producer = SyncProducer{Conf: kconf}
	err = producer.InitSyncProducer()
	return producer, err
}

//异步写
func NewKafkaAsync(address []string, topic, user, password string) (producer AsyncProducer, err error) {
	kconf := ProcucerConf{
		Address:  address,
		User:     user,
		Password: password,
		Topic:    topic,
	}
	producer = AsyncProducer{Conf: kconf}
	err = producer.InitAsyncProducer()
	return
}
