package common

import (
	"context"
	"errors"
	"gitee.com/kinwyb/appTools/log"
	"github.com/Shopify/sarama"
	"github.com/sirupsen/logrus"
	"strings"
)

// KafkaMQ wraps a sarama client, a lazily created sync producer and a
// consumer group, multiplexing per-topic message handlers over one group.
type KafkaMQ struct {
	ctx           context.Context    // lifetime of the consume goroutine
	cancel        context.CancelFunc // cancels ctx; must be called on Close
	addr          []string           // broker addresses
	username      string             // SASL username
	password      string             // SASL password
	prefix        string             // prepended to every topic name
	group         string             // consumer group id
	log           *logrus.Entry               // logger (may be nil)
	clientConfig  *sarama.Config              // shared sarama configuration
	client        sarama.Client               // underlying connection
	producer      sarama.SyncProducer         // created on first Put
	consumerGroup sarama.ConsumerGroup        // rebuilt by restartSubscribe
	msgHandler    map[string]*kafkaMsgHandler // handlers keyed by full topic name
}

// kafkaMsgHandler holds the callback and offset bookkeeping for one topic
// subscription.
type kafkaMsgHandler struct {
	fun             func(msg *sarama.ConsumerMessage) // user callback, invoked per message
	topic           string                            // full (prefixed) topic name
	startNew        bool                              // true: consume from newest, never commit offsets
	partitionOffset map[int32]int64                   // newest offset per partition, captured at subscribe time
}

// NewKafkaMQ connects to kafka. addr is a comma-separated broker list;
// username/password enable SASL authentication when non-empty; prefix is
// prepended to every topic name; group is the consumer-group id.
// Returns an error if the initial client connection fails.
func NewKafkaMQ(addr string,
	username string,
	password string,
	group string,
	prefix string) (*KafkaMQ, error) {
	ret := &KafkaMQ{
		addr:       strings.Split(addr, ","),
		username:   username,
		password:   password,
		prefix:     prefix,
		group:      group,
		msgHandler: map[string]*kafkaMsgHandler{},
		log:        log.Tag("kafka"),
	}
	ret.ctx, ret.cancel = context.WithCancel(context.Background())
	ret.clientConfig = sarama.NewConfig()
	ret.clientConfig.Version = sarama.V1_0_0_0
	ret.clientConfig.Producer.RequiredAcks = sarama.WaitForAll
	ret.clientConfig.Producer.Partitioner = sarama.NewRandomPartitioner
	ret.clientConfig.Producer.Return.Successes = true
	// Bug fix: the credentials were accepted but never applied, so SASL
	// brokers rejected the connection. Only enable when a username is set,
	// keeping unauthenticated setups unchanged.
	if ret.username != "" {
		ret.clientConfig.Net.SASL.Enable = true
		ret.clientConfig.Net.SASL.User = ret.username
		ret.clientConfig.Net.SASL.Password = ret.password
	}
	var err error
	ret.client, err = sarama.NewClient(ret.addr, ret.clientConfig)
	if err != nil {
		// Bug fix: release the context on the error path (go vet lostcancel).
		ret.cancel()
		return nil, err
	}
	return ret, nil
}

// restartSubscribe closes any existing consumer group, creates a new one and
// starts consuming every currently registered topic. It blocks until the
// first session is set up (or fails) and returns the setup error, if any.
func (a *KafkaMQ) restartSubscribe() error {
	if a.log != nil {
		a.log.Debug("kafka 重新subscribe")
	}
	if a.consumerGroup != nil {
		a.consumerGroup.Close()
		a.consumerGroup = nil
	}
	cg, err := sarama.NewConsumerGroup(a.addr, a.group, a.client.Config())
	if err != nil {
		return err
	}
	a.consumerGroup = cg
	topics := make([]string, 0, len(a.msgHandler))
	for topic := range a.msgHandler {
		topics = append(topics, topic)
	}
	if len(topics) == 0 {
		return nil
	}
	cs := &consumerHandler{
		handler: a.msgHandler,
		// Buffered so Setup can signal readiness without a goroutine
		// actively receiving (e.g. on sessions after the first).
		ready: make(chan bool, 1),
		log:   a.log,
	}
	if cs.log == nil {
		cs.log = log.Tag("kafka")
	}
	firstErr := make(chan error, 1)
	go func(cs *consumerHandler, sub sarama.ConsumerGroup, ctx context.Context) {
		for {
			// Bug fix: Consume covers a single group session and returns on
			// every rebalance; it must be re-invoked in a loop or the
			// consumer silently stops after the first rebalance.
			if err := sub.Consume(ctx, topics, cs); err != nil {
				select {
				case firstErr <- err:
				default:
				}
				if a.log != nil {
					a.log.WithError(err).Error("kafka consumer group 监听错误")
				}
				return
			}
			if ctx.Err() != nil {
				return
			}
			// Fresh channel for the next session's Setup signal.
			cs.ready = make(chan bool, 1)
		}
	}(cs, a.consumerGroup, a.ctx)
	// Bug fix: waiting on ready alone deadlocked forever when Consume failed
	// before Setup ran; also honor context cancellation.
	select {
	case <-cs.ready:
	case err := <-firstErr:
		return err
	case <-a.ctx.Done():
		return a.ctx.Err()
	}
	if a.log != nil {
		a.log.Debugf("kafka consumer group [%s] 监听开始：%v", a.group, topics)
	}
	return nil
}

// NewSubscribe registers handler for topic (the configured prefix is added)
// and restarts the consumer group to include it. When startNew is true the
// newest offset of each partition is snapshotted so consumption skips the
// existing backlog. Returns an error when handler is nil, the topic is
// already subscribed, or the consumer group fails to restart.
func (a *KafkaMQ) NewSubscribe(topic string, handler func(msg *sarama.ConsumerMessage), startNew bool) error {
	if handler == nil {
		return errors.New("处理函数不能为空")
	}
	topic = a.prefix + topic
	if _, ok := a.msgHandler[topic]; ok {
		return errors.New("该话题已有订阅")
	}
	mqHandler := &kafkaMsgHandler{
		fun:      handler,
		topic:    topic,
		startNew: startNew,
	}
	if startNew {
		// Best effort: partitions/offsets that cannot be resolved are simply
		// not reset in Setup, matching the previous behavior.
		if partitions, e := a.client.Partitions(topic); e == nil {
			for _, partition := range partitions {
				offset, e := a.client.GetOffset(topic, partition, sarama.OffsetNewest)
				if e != nil {
					continue
				}
				if mqHandler.partitionOffset == nil {
					mqHandler.partitionOffset = map[int32]int64{}
				}
				mqHandler.partitionOffset[partition] = offset
			}
		}
	}
	a.msgHandler[topic] = mqHandler
	// Bug fix: the restart error was silently discarded, hiding subscribe
	// failures from the caller.
	return a.restartSubscribe()
}

// UnSubscribe removes the handler for topic (the configured prefix is added)
// and restarts the consumer group without it. Unsubscribing an unknown topic
// is a no-op.
func (a *KafkaMQ) UnSubscribe(topic string) error {
	topic = a.prefix + topic
	if _, ok := a.msgHandler[topic]; !ok {
		return nil
	}
	delete(a.msgHandler, topic)
	// Bug fix: the restart error was silently discarded, hiding failures
	// from the caller.
	return a.restartSubscribe()
}

// Put publishes msg to the prefixed topic, lazily creating the sync
// producer on first use. Returns the producer-creation or send error.
func (a *KafkaMQ) Put(topic string, msg []byte) error {
	if a.producer == nil {
		producer, err := sarama.NewSyncProducer(a.addr, a.client.Config())
		if err != nil {
			if a.log != nil {
				a.log.WithError(err).Error("kafka Producer 创建失败")
			}
			return err
		}
		a.producer = producer
	}
	_, _, err := a.producer.SendMessage(&sarama.ProducerMessage{
		Topic: a.prefix + topic,
		Value: sarama.ByteEncoder(msg),
	})
	return err
}

// Close cancels the consume context and shuts down the producer, consumer
// group and client, in that order. Safe to call on a partially initialized
// instance.
func (a *KafkaMQ) Close() {
	// Bug fix: the cancel func created in NewKafkaMQ was never invoked, so
	// the context (and the goroutine consuming on it) was never released.
	if a.cancel != nil {
		a.cancel()
	}
	if a.producer != nil {
		a.producer.Close()
	}
	if a.consumerGroup != nil {
		a.consumerGroup.Close()
	}
	if a.client != nil {
		a.client.Close()
	}
}

// consumerHandler implements sarama.ConsumerGroupHandler, dispatching each
// consumed message to the kafkaMsgHandler registered for its topic.
type consumerHandler struct {
	handler map[string]*kafkaMsgHandler // handlers keyed by full topic name
	ready   chan bool                   // signalled in Setup once the session is live
	log     *logrus.Entry               // never nil: restartSubscribe always assigns one
}

// Setup runs at the beginning of a new session, before ConsumeClaim. It
// signals readiness and rewinds every tracked partition to the offset
// snapshotted at subscribe time.
func (consumer *consumerHandler) Setup(session sarama.ConsumerGroupSession) error {
	// Bug fix: the previous send-then-close (`ready <- true`) blocked forever
	// when no goroutine was receiving; a bare close unblocks any receiver
	// identically and can never block.
	close(consumer.ready)
	consumer.log.Debug("consumerHandler.Setup")
	for _, v := range consumer.handler {
		for partition, offset := range v.partitionOffset {
			session.ResetOffset(v.topic, partition, offset, "")
		}
	}
	return nil
}

// Cleanup is run at the end of a session, once all ConsumeClaim goroutines
// have exited. Nothing to release here.
func (consumer *consumerHandler) Cleanup(sarama.ConsumerGroupSession) error {
	return nil
}

// ConsumeClaim drains the claim's message channel, invoking the registered
// handler for each message's topic. The loop ends when the session closes
// the channel. Offsets are only marked for subscriptions that did not
// request starting from the newest message.
func (consumer *consumerHandler) ConsumeClaim(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
	consumer.log.Debug("consumerHandler.ConsumeClaim")
	for msg := range claim.Messages() {
		h, ok := consumer.handler[msg.Topic]
		if !ok {
			continue
		}
		h.fun(msg)
		if !h.startNew {
			session.MarkMessage(msg, "")
		}
	}
	return nil
}
