package mq

import (
	"sync"
	"time"

	"github.com/Shopify/sarama"
	"github.com/sirupsen/logrus"
)

// Producer wraps a sarama asynchronous producer together with the list
// of broker addresses it was created against.
type Producer struct {
	Brokers       []string
	AsyncProducer sarama.AsyncProducer // underlying asynchronous producer
}

var (
	// insProducer is the process-wide singleton returned by NewKafkaProducer.
	insProducer *Producer
	// onceProducer guards the one-time initialization of insProducer.
	onceProducer sync.Once
)

// NewKafkaProducer lazily builds the package-wide singleton Producer on
// the first call and returns that same instance on every subsequent
// call. Arguments passed on later calls are ignored — the producer
// keeps the brokers/credentials from the first invocation.
//
// When passwd is non-empty, SASL authentication is enabled with the
// given user/passwd pair. Panics if the underlying sarama async
// producer cannot be created.
func NewKafkaProducer(brokers []string, user, passwd string) *Producer {
	onceProducer.Do(func() {
		cfg := initConfig()
		if passwd != "" {
			cfg.Net.SASL.Enable = true
			cfg.Net.SASL.User = user
			cfg.Net.SASL.Password = passwd
		}
		ap, err := sarama.NewAsyncProducer(brokers, cfg)
		if err != nil {
			logrus.Error(err.Error(), " brokers ", brokers, " user ", user)
			panic(err)
		}
		insProducer = &Producer{Brokers: brokers, AsyncProducer: ap}
	})
	return insProducer
}

// SimpleProducer returns the underlying async producer and starts a
// background goroutine that drains its Errors and Successes channels,
// logging any produce errors (sarama's async producer blocks if these
// channels are not consumed).
//
// NOTE: each call spawns a new drain goroutine, so call this once per
// Producer. The goroutine exits after both channels are closed, which
// happens when the producer is closed.
func (p *Producer) SimpleProducer() sarama.AsyncProducer {
	producer := p.AsyncProducer

	go func(ap sarama.AsyncProducer) {
		errs := ap.Errors()
		oks := ap.Successes()
		// BUG FIX: the previous `case err := <-errors` form kept
		// receiving zero values forever once the channels were closed
		// (a closed channel is always ready), busy-spinning a leaked
		// goroutine after Close(). Using the ok-check lets us disable
		// each closed channel and return when both are done.
		for errs != nil || oks != nil {
			select {
			case err, ok := <-errs:
				if !ok {
					errs = nil
					continue
				}
				if err != nil {
					logrus.Error("kafka produce error", "error", err)
				}
			case _, ok := <-oks:
				if !ok {
					oks = nil
				}
			}
		}
	}(producer)

	return producer
}

// BuildMsgWithKey assembles a producer message for the given topic with
// both a key and a value payload.
func (p *Producer) BuildMsgWithKey(topic, msg, key string) *sarama.ProducerMessage {
	m := &sarama.ProducerMessage{Topic: topic}
	m.Key = sarama.StringEncoder(key)
	m.Value = sarama.StringEncoder(msg)
	return m
}

// BuildMsg assembles a keyless producer message for the given topic.
func (p *Producer) BuildMsg(topic, msg string) *sarama.ProducerMessage {
	m := &sarama.ProducerMessage{Topic: topic}
	m.Value = sarama.StringEncoder(msg)
	return m
}

// Close shuts down the async producer, draining and flushing any
// buffered messages. It is safe to call on a nil receiver or on a
// Producer whose AsyncProducer was never initialized (the original
// guarded only against a nil receiver and would panic calling Close
// on a nil interface).
func (p *Producer) Close() {
	if p == nil || p.AsyncProducer == nil {
		return
	}
	if err := p.AsyncProducer.Close(); err != nil {
		logrus.Error(err.Error(), " async producer close err")
	}
}

// initConfig builds the sarama configuration used by this package:
// full-ISR acks, round-robin partitioning, LZ4 compression, and batched
// flushing (4 MiB or one second, whichever comes first).
//
// NOTE(review): the round-robin partitioner ignores message keys, so
// messages built with BuildMsgWithKey are not routed by key — confirm
// this is intended.
func initConfig() *sarama.Config {
	cfg := sarama.NewConfig()
	// Wait for the full in-sync replica set to acknowledge each message.
	cfg.Producer.RequiredAcks = sarama.WaitForAll
	// Rotate to a new partition for every message.
	cfg.Producer.Partitioner = sarama.NewRoundRobinPartitioner
	cfg.Version = sarama.V0_10_1_0
	//cfg.Producer.Return.Successes = true // not enabled for the async producer
	cfg.Producer.Timeout = 5 * time.Second
	cfg.Producer.Retry.Max = 10
	// LZ4 requires Kafka 0.10.0 or later.
	cfg.Producer.Compression = sarama.CompressionLZ4
	// Flush once the pending batch reaches 4 MiB...
	cfg.Producer.Flush.Bytes = 1024 * 1024 * 4
	// ...or at most every second.
	cfg.Producer.Flush.Frequency = time.Second
	// cfg.Producer.Compression = sarama.CompressionGZIP
	cfg.Net.KeepAlive = time.Second * 5

	return cfg
}
