package kafkautil

import (
	"github.com/bsm/sarama-cluster"
	"github.com/Shopify/sarama"
	"strings"
	"gitee.com/vzr/gearutil/log"
	"os"
	"os/signal"
)

// ReceiveCollectMessage 启动多个消费者消费消息
func ReceiveCollectMessage(server string, topic string, group string, consumer int, task func(msg []byte)) {
	for i := 0; i < consumer; i++ {
		go ConsumeMsg(server, group, topic, func(msg *sarama.ConsumerMessage) {
			go task(msg.Value[:])
		})
	}
}

// ConsumeMsg consumes messages from the given topic as part of the consumer
// group groupID, invoking task for every message received and marking its
// offset as processed afterwards.
//
// host may be a single broker address or a comma-separated broker list.
// The function blocks until the process receives an interrupt signal or the
// consumer's message channel is closed; on setup failure it logs and returns.
func ConsumeMsg(host string, groupID string, topic string, task func(*sarama.ConsumerMessage)) {
	config := cluster.NewConfig()
	config.Consumer.Return.Errors = true
	config.Group.Return.Notifications = true

	// Split the comma-separated broker list into individual addresses.
	hosts := strings.Split(host, ",")

	consumer, err := cluster.NewConsumer(hosts, groupID, []string{topic}, config)
	if err != nil {
		log.Err.Println(err)
		// BUG FIX: previously execution continued with a nil consumer,
		// which panicked on Close() and on the channel reads below.
		return
	}
	defer consumer.Close()

	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)

	for {
		select {
		case msg, more := <-consumer.Messages():
			if !more {
				// BUG FIX: the message channel is closed; returning avoids
				// busy-spinning on zero values forever.
				return
			}
			task(msg)
			consumer.MarkOffset(msg, "") // mark message as processed
		case err, more := <-consumer.Errors():
			if more {
				log.Err.Println(err)
			}
		case ntf, more := <-consumer.Notifications():
			if more {
				log.Err.Printf("Rebalanced: %+v\n", ntf)
			}
		case <-signals:
			// Graceful shutdown on Ctrl-C / SIGINT.
			return
		}
	}
}
